[Eager] delete final state pre-name #45306

Merged: 58 commits merged into develop from wanghuancoder's norm_use_final_state branch on Aug 26, 2022.

Commits (58, all by wanghuancoder):
be11e2e  sync_batch_norm_backword_yaml (Aug 17, 2022)
954cc4a  norm api use final state (Aug 17, 2022)
cb9efff  adadelta use final state (Aug 17, 2022)
7f9bd47  refine (Aug 18, 2022)
7d08798  adamax use final state (Aug 18, 2022)
0108118  strided_slice use final statue (Aug 18, 2022)
82fea45  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 18, 2022)
4e023bf  parameters_to_vector vector_to_parameters use final state (Aug 18, 2022)
2b0746b  refine (Aug 18, 2022)
160b43f  refine (Aug 18, 2022)
d300fff  refine (Aug 18, 2022)
b6a2b6b  orthogonal use final state (Aug 18, 2022)
88ee999  refine (Aug 18, 2022)
e9519d3  refine (Aug 18, 2022)
5ab1b87  pad use final state (Aug 18, 2022)
cb78466  size use final statue (Aug 18, 2022)
991f764  loss.py reshape gather_nd use final state (Aug 19, 2022)
42ca81b  some op,use final state (Aug 19, 2022)
3bba1d2  uniform_random use final state (Aug 19, 2022)
1a98e68  Merge branch 'develop' into norm_use_final_state (Aug 22, 2022)
b0d4665  refine (Aug 22, 2022)
5131dbf  Merge branch 'norm_use_final_state' of https://github.com/wanghuancod… (Aug 22, 2022)
6c6b140  refine (Aug 22, 2022)
3c2cf23  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 22, 2022)
47cdfa8  delete final state pre name (Aug 22, 2022)
316784a  merge (Aug 22, 2022)
e121600  refine (Aug 22, 2022)
d569558  merge (Aug 22, 2022)
159cc62  refine (Aug 22, 2022)
dc468e0  refine (Aug 22, 2022)
669e832  refine (Aug 22, 2022)
68077a7  refine (Aug 22, 2022)
c3e54f8  refine (Aug 22, 2022)
403e032  refine (Aug 22, 2022)
afa43d4  refine (Aug 23, 2022)
3c713fc  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 23, 2022)
9dd9e94  refine (Aug 23, 2022)
3a28ce5  refine (Aug 23, 2022)
484e8c0  refine (Aug 23, 2022)
1dbfbaa  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 23, 2022)
5599093  refine (Aug 23, 2022)
caa2be9  refine (Aug 24, 2022)
828b321  refine (Aug 24, 2022)
a3b16b4  refine (Aug 24, 2022)
2a6f7d7  refine (Aug 25, 2022)
952bce9  merge (Aug 25, 2022)
0cdc1e1  merge (Aug 25, 2022)
3e881ef  refine (Aug 25, 2022)
5761375  refine (Aug 25, 2022)
585f1f9  refine (Aug 25, 2022)
ba87d67  refine (Aug 25, 2022)
afa0914  refine (Aug 25, 2022)
51afbda  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 25, 2022)
2ebf43e  merge (Aug 25, 2022)
b516f08  Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (Aug 26, 2022)
c0ecbe1  refine (Aug 26, 2022)
d0bb790  refine (Aug 26, 2022)
a2a0f3a  refine (Aug 26, 2022)
6 changes: 3 additions & 3 deletions .gitignore
@@ -66,14 +66,14 @@ paddle/infrt/dialect/pd/common/pd_ops_info.h
 paddle/infrt/tests/dialect/Output
 paddle/infrt/tests/lit.cfg.py
 paddle/infrt/kernel/phi/infershaped/infershaped_kernel_launchers.cc
-paddle/fluid/pybind/eager_final_state_op_function.cc
+paddle/fluid/pybind/eager_op_function.cc

 # these files (directories) are generated before build system generation
 paddle/fluid/operators/generated_op.cc
 paddle/phi/ops/compat/generated_sig.cc
 paddle/phi/api/yaml/parsed_apis/
 python/paddle/utils/code_gen/
-paddle/fluid/pybind/tmp_eager_final_state_op_function_impl.h
-paddle/fluid/pybind/eager_final_state_op_function_impl.h
+paddle/fluid/pybind/tmp_eager_op_function_impl.h
+paddle/fluid/pybind/eager_op_function_impl.h
 paddle/fluid/pybind/eager_op_function_impl.h
 paddle/fluid/pybind/op_function_impl.h
2 changes: 1 addition & 1 deletion paddle/fluid/eager/CMakeLists.txt
@@ -41,7 +41,7 @@ if(NOT ((NOT WITH_PYTHON) AND ON_INFER))
     grad_tensor_holder
     SRCS grad_tensor_holder.cc
     DEPS grad_node_info gradient_accumulator)
-  add_dependencies(grad_tensor_holder eager_final_state_codegen)
+  add_dependencies(grad_tensor_holder eager_codegen)
   cc_library(
     backward
     SRCS backward.cc
@@ -8,5 +8,5 @@ if(NOT (NOT WITH_PYTHON AND ON_INFER))
     final_dygraph_node
     SRCS nodes.cc ${eager_manual_nodes}
     DEPS ${eager_deps})
-  add_dependencies(final_dygraph_node eager_final_state_codegen)
+  add_dependencies(final_dygraph_node eager_codegen)
 endif()
@@ -8,5 +8,5 @@ if(NOT (NOT WITH_PYTHON AND ON_INFER))
     final_dygraph_function
     SRCS dygraph_functions.cc ${eager_manual_functions}
     DEPS ${eager_deps})
-  add_dependencies(final_dygraph_function eager_final_state_codegen)
+  add_dependencies(final_dygraph_function eager_codegen)
 endif()
@@ -16,10 +16,10 @@

 #include "paddle/phi/api/include/tensor.h"

-paddle::experimental::Tensor add_n_final_state_dygraph_function(
+paddle::experimental::Tensor add_n_dygraph_function(
     const std::vector<paddle::experimental::Tensor>& x);

-paddle::experimental::Tensor conv2d_final_state_dygraph_function(
+paddle::experimental::Tensor conv2d_dygraph_function(
     const paddle::experimental::Tensor& input,
     const paddle::experimental::Tensor& filter,
     std::vector<int> strides,
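For callers, this header change is purely a rename: the "_final_state_" infix disappears from every generated forward function. A minimal call-site sketch under that rename (the include path and the wrapper function here are assumptions for illustration, not part of this diff):

    #include <vector>
    // Assumed location of the generated declarations shown above:
    #include "paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.h"

    paddle::experimental::Tensor sum_two(const paddle::experimental::Tensor& a,
                                         const paddle::experimental::Tensor& b) {
      // Before this PR: add_n_final_state_dygraph_function({a, b});
      return add_n_dygraph_function({a, b});
    }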
@@ -23,7 +23,7 @@
 #pragma GCC diagnostic ignored "-Wunused-variable"
 DECLARE_bool(check_nan_inf);

-paddle::experimental::Tensor add_n_final_state_dygraph_function(
+paddle::experimental::Tensor add_n_dygraph_function(
     const std::vector<paddle::experimental::Tensor>& x) {
   // Dygraph Record Event
   paddle::platform::RecordEvent dygraph_entrance_record_event(
@@ -46,7 +46,7 @@ paddle::experimental::Tensor add_n_final_state_dygraph_function(
       paddle::imperative::AutoCastGuard guard(
           egr::Controller::Instance().GetCurrentTracer(),
          paddle::imperative::AmpLevel::O0);
-      return add_n_final_state_dygraph_function(NEW_x);
+      return add_n_dygraph_function(NEW_x);
     }
   }

@@ -56,7 +56,7 @@ paddle::experimental::Tensor add_n_final_state_dygraph_function(
   std::vector<egr::AutogradMeta*>* x_autograd_meta = &x_autograd_meta_vec;
   // Forward API Call
   VLOG(3) << "Final State Running: "
-          << "add_n_final_state_dygraph_function";
+          << "add_n_dygraph_function";
   auto api_result = paddle::experimental::add_n(x);
   // Check NaN and Inf if needed
   if (FLAGS_check_nan_inf) {
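One detail worth noting in this hunk: when AMP is active, the generated function casts its inputs (NEW_x), pins the AMP level to O0 with an RAII guard, and re-enters itself, which is why the recursive call had to be renamed along with the definition. A minimal, Paddle-free sketch of that guard-and-reenter pattern (all names are stand-ins, not Paddle APIs):

    #include <iostream>
    #include <vector>

    enum class AmpLevel { O0, O1 };
    static AmpLevel g_amp_level = AmpLevel::O1;

    // RAII guard that pins the AMP level for the current scope and restores it
    // on exit; stands in for paddle::imperative::AutoCastGuard.
    struct AutoCastGuard {
      AmpLevel saved;
      explicit AutoCastGuard(AmpLevel level) : saved(g_amp_level) {
        g_amp_level = level;
      }
      ~AutoCastGuard() { g_amp_level = saved; }
    };

    double add_n(const std::vector<double>& x) {
      if (g_amp_level != AmpLevel::O0) {
        std::vector<double> new_x = x;      // stands in for AMP auto-casting
        AutoCastGuard guard(AmpLevel::O0);  // prevents infinite re-dispatch
        return add_n(new_x);                // re-enters the renamed function
      }
      double sum = 0.0;
      for (double v : x) sum += v;
      return sum;
    }

    int main() { std::cout << add_n({1.0, 2.0, 3.0}) << "\n"; }  // prints 6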
@@ -23,7 +23,7 @@
 #pragma GCC diagnostic ignored "-Wunused-variable"
 DECLARE_bool(check_nan_inf);

-paddle::experimental::Tensor conv2d_final_state_dygraph_function(
+paddle::experimental::Tensor conv2d_dygraph_function(
     const paddle::experimental::Tensor& input,
     const paddle::experimental::Tensor& filter,
     std::vector<int> strides,
@@ -59,17 +59,17 @@ paddle::experimental::Tensor conv2d_final_state_dygraph_function(
       paddle::imperative::AutoCastGuard guard(
           egr::Controller::Instance().GetCurrentTracer(),
           paddle::imperative::AmpLevel::O0);
-      return conv2d_final_state_dygraph_function(NEW_input,
-                                                 NEW_filter,
-                                                 strides,
-                                                 paddings,
-                                                 paddding_algorithm,
-                                                 groups,
-                                                 dilations,
-                                                 data_format,
-                                                 use_addto,
-                                                 workspace_size_MB,
-                                                 exhaustive_search);
+      return conv2d_dygraph_function(NEW_input,
+                                     NEW_filter,
+                                     strides,
+                                     paddings,
+                                     paddding_algorithm,
+                                     groups,
+                                     dilations,
+                                     data_format,
+                                     use_addto,
+                                     workspace_size_MB,
+                                     exhaustive_search);
     }
   }

@@ -80,7 +80,7 @@ paddle::experimental::Tensor conv2d_final_state_dygraph_function(
       egr::EagerUtils::nullable_autograd_meta(filter);
   // Forward API Call
   VLOG(3) << "Final State Running: "
-          << "conv2d_final_state_dygraph_function";
+          << "conv2d_dygraph_function";
   auto api_result = paddle::experimental::conv2d(input,
                                                  filter,
                                                  strides,
@@ -64,8 +64,8 @@ AddNGradNodeFinal::operator()(

   // dygraph function
   for (size_t i = 0; i < returns[0].size(); i++) {
-    returns[0][i] = ::scale_final_state_dygraph_function(
-        out_grad, phi::Scalar(1.0), 0.0, true);
+    returns[0][i] =
+        ::scale_dygraph_function(out_grad, phi::Scalar(1.0), 0.0, true);
   }

   // Check NaN and Inf id needed
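Context for why this backward node scales by exactly 1.0: add_n computes the elementwise sum of its inputs, so the derivative of the output with respect to each input is 1, and every input slot simply receives a copy of out_grad. A schematic of that fan-out, with plain doubles standing in for tensors (illustrative, not Paddle code):

    #include <vector>

    // add_n backward: each input's gradient is the upstream gradient unchanged,
    // mirroring ::scale_dygraph_function(out_grad, phi::Scalar(1.0), 0.0, true),
    // i.e. out_grad * 1.0 + 0.0.
    std::vector<double> add_n_backward(double out_grad, std::size_t num_inputs) {
      return std::vector<double>(num_inputs, out_grad * 1.0 + 0.0);
    }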
6 changes: 3 additions & 3 deletions paddle/fluid/eager/auto_code_generator/CMakeLists.txt
@@ -1,4 +1,4 @@
-add_subdirectory(final_state_generator)
+add_subdirectory(generator)

 set(EAGER_GENERETOR_DEPS
     ${GLOB_OP_LIB}
@@ -88,7 +88,7 @@ if(WIN32)
   endif()

   add_custom_target(
-    eager_codegen
+    legacy_eager_codegen
     COMMAND
       "${eager_generator_path}/eager_generator.exe"
       "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated"
@@ -97,7 +97,7 @@ if(WIN32)
     VERBATIM)
 else()
   add_custom_target(
-    eager_codegen
+    legacy_eager_codegen
     COMMAND
       ${CMAKE_COMMAND} -E env
       "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}:${CMAKE_CURRENT_BINARY_DIR}/../../pybind"
73 changes: 37 additions & 36 deletions paddle/fluid/eager/auto_code_generator/eager_generator.cc
@@ -37,11 +37,11 @@ namespace framework {

 // To handle append_op at python-level
 std::unordered_map<std::string, std::vector<std::string>>
-    core_ops_returns_info = {};
-std::unordered_map<std::string, std::vector<std::string>> core_ops_args_info =
-    {};
+    core_ops_legacy_returns_info = {};
 std::unordered_map<std::string, std::vector<std::string>>
-    core_ops_args_type_info = {};
+    core_ops_legacy_args_info = {};
+std::unordered_map<std::string, std::vector<std::string>>
+    core_ops_legacy_args_type_info = {};

 /* --- Static maps to handle corner cases --- */
 static std::unordered_map<std::string, paddle::framework::AttributeMap>
@@ -1473,10 +1473,10 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(

   std::string dygraph_function_args_str = "";
   std::string amp_function_call_args_str = "";
-  core_ops_args_info[op_type] = {};
-  core_ops_args_type_info[op_type] = {};
-  core_ops_args_info[op_type].resize(in_vars.size());
-  core_ops_args_type_info[op_type].resize(in_vars.size());
+  core_ops_legacy_args_info[op_type] = {};
+  core_ops_legacy_args_type_info[op_type] = {};
+  core_ops_legacy_args_info[op_type].resize(in_vars.size());
+  core_ops_legacy_args_type_info[op_type].resize(in_vars.size());

   /* ------ Dygraph forward function generation ------ */
   generated_function_body += "  // Dygraph Forward Pass\n";
@@ -1500,7 +1500,7 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
       amp_function_call_args_str_list[input_position] =
           " NEW_" + LegalizeVarName(input_name);

-      core_ops_args_type_info[op_type][input_position] = "list";
+      core_ops_legacy_args_type_info[op_type][input_position] = "list";
     } else {
       // inplace tensor can't be const
       const char* FWD_INS_ARG_TEMPLATE;
@@ -1522,9 +1522,9 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
       amp_function_call_args_str_list[input_position] =
           " NEW_" + LegalizeVarName(input_name);

-      core_ops_args_type_info[op_type][input_position] = "tensor";
+      core_ops_legacy_args_type_info[op_type][input_position] = "tensor";
     }
-    core_ops_args_info[op_type][input_position] = input_name;
+    core_ops_legacy_args_info[op_type][input_position] = input_name;

     if (input.dispensable()) continue;

@@ -1666,15 +1666,15 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
       dygraph_function_args_str += arg_str;
       amp_function_call_args_str += (", " + LegalizeVarName(output_var_name));

-      core_ops_args_type_info[op_type].push_back("list");
+      core_ops_legacy_args_type_info[op_type].push_back("list");
     } else {
       const char* FWD_NUM_ARG_TEMPLATE = ", paddle::experimental::Tensor* %s";
       std::string arg_str = paddle::string::Sprintf(
           FWD_NUM_ARG_TEMPLATE, LegalizeVarName(output_var_name));
       dygraph_function_args_str += arg_str;
       amp_function_call_args_str += (", " + LegalizeVarName(output_var_name));

-      core_ops_args_type_info[op_type].push_back("tensor");
+      core_ops_legacy_args_type_info[op_type].push_back("tensor");
     }

     if (BeSameAsInput(output_name, input_names)) {
@@ -1693,7 +1693,7 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
             output_name,
             LegalizeVarName(output_var_name));
       }
-      core_ops_args_info[op_type].push_back(output_name);
+      core_ops_legacy_args_info[op_type].push_back(output_name);

     } else if (!forward_inplace_map.empty() &&
                forward_inplace_map.count(output_name)) {
@@ -1727,8 +1727,8 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
           "{ \"%s\", egr::EagerUtils::CreateVars(%s) },";
       outs_contents_str += paddle::string::Sprintf(
           FWD_OUTS_CONTENT_TEMPLATE, output_name, outnum);
-      core_ops_args_info[op_type].push_back(outnum);
-      core_ops_args_type_info[op_type].push_back("int");
+      core_ops_legacy_args_info[op_type].push_back(outnum);
+      core_ops_legacy_args_type_info[op_type].push_back("int");
     } else {
       const char* FWD_OUTS_CONTENT_TEMPLATE =
           "{ \"%s\", "
@@ -2003,10 +2003,11 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(
   VLOG(6) << "Converted Output VarBase to EagerVariable(s)";
   /* ------ END Generate TraceOp ----- */

-  // [Generation] Handle core_ops_returns_info
-  // avoid inplace op changing core_ops_returns_info
-  if (core_ops_returns_info.empty() || !core_ops_returns_info.count(op_type)) {
-    core_ops_returns_info[op_type] = return_contents;
+  // [Generation] Handle core_ops_legacy_returns_info
+  // avoid inplace op changing core_ops_legacy_returns_info
+  if (core_ops_legacy_returns_info.empty() ||
+      !core_ops_legacy_returns_info.count(op_type)) {
+    core_ops_legacy_returns_info[op_type] = return_contents;
   }

   // [Generation] ComputeRequireGrad -> GradNodeCreation
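Taken together, these renamed maps record, per op, the positional argument names, the kind of each argument, and the output names; the Python layer consults them when handling append_op. A concrete illustration of what they might hold for one op (op and argument names invented for illustration):

    // Hypothetical op "my_op" with a tensor-list input X, a tensor input Y,
    // and a single output Out:
    core_ops_legacy_args_info["my_op"] = {"X", "Y", "Out"};
    core_ops_legacy_args_type_info["my_op"] = {"list", "tensor", "tensor"};
    core_ops_legacy_returns_info["my_op"] = {"Out"};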
@@ -2983,13 +2984,13 @@ static std::string GenerateDygraphHFileIncludes() {

   dygraph_forward_api_includes_str +=
       "extern std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_args_info;\n";
+      "core_ops_legacy_args_info;\n";
   dygraph_forward_api_includes_str +=
       "extern std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_args_type_info;\n";
+      "core_ops_legacy_args_type_info;\n";
   dygraph_forward_api_includes_str +=
       "extern std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_returns_info;\n\n";
+      "core_ops_legacy_returns_info;\n\n";

   return dygraph_forward_api_includes_str;
 }
@@ -3060,7 +3061,7 @@ static void GenerateNodeCCFile(const std::string& node_cc_path,
 static std::string ConvertCoreOpsInfosToString(
     const std::unordered_map<std::string, std::vector<std::string>>&
         core_ops_info) {
-  std::string core_ops_returns_info_init_str = "";
+  std::string core_ops_legacy_returns_info_init_str = "";
   for (const auto& iter : core_ops_info) {
     const char* Core_Ops_Returns_TEMPLATE = "{ \"%s\", { %s } },\n";
     const std::string& op_type = iter.first;
@@ -3074,23 +3075,23 @@ static std::string ConvertCoreOpsInfosToString(
     if (returns_str.size() > 0) returns_str.pop_back();
     std::string op_type_init_str = paddle::string::Sprintf(
         Core_Ops_Returns_TEMPLATE, op_type, returns_str);
-    core_ops_returns_info_init_str += op_type_init_str;
+    core_ops_legacy_returns_info_init_str += op_type_init_str;
   }

   // Remove trailing ','
-  if (core_ops_returns_info_init_str.size() > 0)
-    core_ops_returns_info_init_str.pop_back();
+  if (core_ops_legacy_returns_info_init_str.size() > 0)
+    core_ops_legacy_returns_info_init_str.pop_back();

-  return core_ops_returns_info_init_str;
+  return core_ops_legacy_returns_info_init_str;
 }

 static std::string GenerateCoreOpsArgsInfo() {
   const char* Core_Ops_Returns_MAP_TEMPLATE =
       "std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_args_info = { %s };\n";
+      "core_ops_legacy_args_info = { %s };\n";

   std::string core_ops_args_info_init_str =
-      ConvertCoreOpsInfosToString(core_ops_args_info);
+      ConvertCoreOpsInfosToString(core_ops_legacy_args_info);

   std::string core_ops_info_str = paddle::string::Sprintf(
       Core_Ops_Returns_MAP_TEMPLATE, core_ops_args_info_init_str);
@@ -3101,10 +3102,10 @@ static std::string GenerateCoreOpsArgsInfo() {
 static std::string GenerateCoreOpsArgsTypeInfo() {
   const char* Core_Ops_Returns_MAP_TEMPLATE =
       "std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_args_type_info = { %s };\n";
+      "core_ops_legacy_args_type_info = { %s };\n";

   std::string core_ops_args_type_info_init_str =
-      ConvertCoreOpsInfosToString(core_ops_args_type_info);
+      ConvertCoreOpsInfosToString(core_ops_legacy_args_type_info);

   std::string core_ops_info_str = paddle::string::Sprintf(
       Core_Ops_Returns_MAP_TEMPLATE, core_ops_args_type_info_init_str);
@@ -3115,13 +3116,13 @@ static std::string GenerateCoreOpsArgsTypeInfo() {
 static std::string GenerateCoreOpsReturnsInfo() {
   const char* Core_Ops_Returns_MAP_TEMPLATE =
       "std::unordered_map<std::string, std::vector<std::string>> "
-      "core_ops_returns_info = { %s };\n";
+      "core_ops_legacy_returns_info = { %s };\n";

-  std::string core_ops_returns_info_init_str =
-      ConvertCoreOpsInfosToString(core_ops_returns_info);
+  std::string core_ops_legacy_returns_info_init_str =
+      ConvertCoreOpsInfosToString(core_ops_legacy_returns_info);

   std::string core_ops_info_str = paddle::string::Sprintf(
-      Core_Ops_Returns_MAP_TEMPLATE, core_ops_returns_info_init_str);
+      Core_Ops_Returns_MAP_TEMPLATE, core_ops_legacy_returns_info_init_str);

   return core_ops_info_str;
 }
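Given the Sprintf templates above, the generated .cc ends up defining each map as one brace-initialized table. For a single hypothetical op, the emitted definition would look roughly like this (op and argument names invented):

    std::unordered_map<std::string, std::vector<std::string>>
        core_ops_legacy_args_info = { { "my_op", { "X", "Y", "Out" } } };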
@@ -124,7 +124,7 @@ def GenerateFileStructureForIntermediateDygraph(eager_dir, split_count):
             ".tmp.cc\" \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/nodes/nodes"
             + str(i + 1) + ".cc\"\n")

-        f.write(" DEPENDS eager_codegen\n")
+        f.write(" DEPENDS legacy_eager_codegen\n")
         f.write(" VERBATIM)\n")

         f.write("cc_library(dygraph_node SRCS ")
@@ -154,7 +154,7 @@ def GenerateFileStructureForIntermediateDygraph(eager_dir, split_count):
         f.write(
             " COMMAND ${CMAKE_COMMAND} -E copy_if_different \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/forwards/dygraph_forward_functions_returns_info.tmp.cc\" \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/forwards/dygraph_forward_functions_returns_info.cc\"\n"
         )
-        f.write(" DEPENDS eager_codegen\n")
+        f.write(" DEPENDS legacy_eager_codegen\n")
         f.write(" VERBATIM)\n")

         f.write("cc_library(dygraph_function SRCS ")