Skip to content

Commit

Permalink
Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into…
Browse files Browse the repository at this point in the history
… del_final_state_pre_name2
  • Loading branch information
wanghuancoder committed Aug 26, 2022
2 parents 2ebf43e + efab2eb commit b516f08
Show file tree
Hide file tree
Showing 78 changed files with 1,460 additions and 462 deletions.
9 changes: 4 additions & 5 deletions cmake/external/brpc.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,8 @@ set(prefix_path
ExternalProject_Add(
extern_brpc
${EXTERNAL_PROJECT_LOG_ARGS}
# TODO(gongwb): change to de newst repo when they changed
GIT_REPOSITORY "https://github.com/wangjiawei04/brpc"
GIT_TAG "e203afb794caf027da0f1e0776443e7d20c0c28e"
GIT_REPOSITORY "https://github.com/apache/incubator-brpc"
GIT_TAG 1.2.0
PREFIX ${BRPC_PREFIX_DIR}
UPDATE_COMMAND ""
CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
Expand All @@ -60,8 +59,8 @@ ExternalProject_Add(
-DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-DCMAKE_PREFIX_PATH=${prefix_path}
-DWITH_GLOG=ON
-DIOBUF_WITH_HUGE_BLOCK=ON
-DBRPC_WITH_RDMA=${WITH_BRPC_RDMA}
-DBUILD_BRPC_TOOLS=ON
-DBUILD_SHARED_LIBS=ON
${EXTERNAL_OPTIONAL_ARGS}
LIST_SEPARATOR |
CMAKE_CACHE_ARGS
Expand Down
10 changes: 10 additions & 0 deletions paddle/fluid/distributed/collective/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,11 @@ if(WITH_NCCL OR WITH_RCCL)
DEPS processgroup place enforce collective_helper device_context
dense_tensor)
if(WITH_DISTRIBUTE AND WITH_PSCORE)
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 7.0)
set(DISTRIBUTE_COMPILE_FLAGS "${DISTRIBUTE_COMPILE_FLAGS} -faligned-new")
endif()
set_source_files_properties(
ProcessGroupHeter.cc PROPERTIES COMPILE_FLAGS ${DISTRIBUTE_COMPILE_FLAGS})
cc_library(
processgroup_heter
SRCS ProcessGroupHeter.cc NCCLTools.cc Common.cc
Expand All @@ -40,6 +45,11 @@ if(WITH_ASCEND_CL)
phi_api
eager_api)
if(WITH_DISTRIBUTE AND WITH_PSCORE)
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 7.0)
set(DISTRIBUTE_COMPILE_FLAGS "${DISTRIBUTE_COMPILE_FLAGS} -faligned-new")
endif()
set_source_files_properties(
ProcessGroupHeter.cc PROPERTIES COMPILE_FLAGS ${DISTRIBUTE_COMPILE_FLAGS})
cc_library(
processgroup_heter
SRCS ProcessGroupHeter.cc HCCLTools.cc Common.cc
Expand Down
4 changes: 3 additions & 1 deletion paddle/fluid/distributed/ps/service/heter_client.h
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,9 @@ class HeterClient {
options.connection_type = "";
VLOG(4) << "ssl enabled in arm";
#else
options.ssl_options.enable = need_encrypt;
if (need_encrypt) {
options.mutable_ssl_options();
}
#endif
client_channels = &peer_switch_channels_;
} else if (peer_role == PEER_ROLE_IS_WORKER) {
Expand Down
10 changes: 0 additions & 10 deletions paddle/fluid/distributed/ps/service/heter_server.cc
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,8 @@ void HeterServer::StartHeterService(bool neeed_encrypt) {
server_.AddService(&service_, brpc::SERVER_DOESNT_OWN_SERVICE);
brpc::ServerOptions options;
if (neeed_encrypt) {
#ifdef PADDLE_WITH_ARM_BRPC
options.mutable_ssl_options()->default_cert.certificate = "/cert.pem";
options.mutable_ssl_options()->default_cert.private_key = "/key.pem";
#else
options.ssl_options.default_cert.certificate = "/cert.pem";
options.ssl_options.default_cert.private_key = "/key.pem";
#endif
}
if (server_.Start(endpoint_.c_str(), &options) != 0) {
VLOG(0) << "HeterServer start fail. Try again.";
Expand Down Expand Up @@ -72,13 +67,8 @@ void HeterServer::StartHeterInterService(bool neeed_encrypt) {
server_inter_.AddService(&service_, brpc::SERVER_DOESNT_OWN_SERVICE);
brpc::ServerOptions options;
if (neeed_encrypt) {
#ifdef PADDLE_WITH_ARM_BRPC
options.mutable_ssl_options()->default_cert.certificate = "/cert.pem";
options.mutable_ssl_options()->default_cert.private_key = "/key.pem";
#else
options.ssl_options.default_cert.certificate = "/cert.pem";
options.ssl_options.default_cert.private_key = "/key.pem";
#endif
}
if (server_inter_.Start(endpoint_inter_.c_str(), &options) != 0) {
VLOG(4) << "switch inter server start fail. Try again.";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def SkipAPIGeneration(forward_api_name):
"long": "CastPyArg2Long",
"int64_t": "CastPyArg2Long",
"float": "CastPyArg2Float",
"double": "CastPyArg2Double",
"std::string": "CastPyArg2String",
"std::vector<bool>": "CastPyArg2Booleans",
"std::vector<int>": "CastPyArg2Ints",
Expand Down
31 changes: 31 additions & 0 deletions paddle/fluid/framework/attribute.h
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,37 @@ struct ExtractAttribute<float> {
const std::string& attr_name_;
};

// Extracts a `double` attribute from the Attribute variant.
// If the variant currently holds a narrower numeric type (int, int64_t,
// or float), the stored value is promoted to double in place first, so
// the returned pointer always refers to a `double` slot inside `attr`.
// Throws InvalidArgument if the attribute cannot be converted.
// Mirrors the int/int64_t promotion pattern of ExtractAttribute<float>.
template <>
struct ExtractAttribute<double> {
  explicit ExtractAttribute(const std::string& attr_name)
      : attr_name_(attr_name) {}

  double* operator()(Attribute& attr) const {
    // Promote narrower numeric payloads in place so the caller receives a
    // stable double* into the variant storage.
    if (attr.type() == typeid(int)) {  // NOLINT
      int val = PADDLE_GET_CONST(int, attr);
      attr = static_cast<double>(val);
    } else if (attr.type() == typeid(int64_t)) {  // NOLINT
      int64_t val = PADDLE_GET_CONST(int64_t, attr);
      attr = static_cast<double>(val);
    } else if (attr.type() == typeid(float)) {  // NOLINT
      // Fix: read the stored float as a float. The previous code read it
      // into an int64_t, silently truncating the fractional part before
      // the cast to double (e.g. 1.5f became 1.0).
      float val = PADDLE_GET_CONST(float, attr);
      attr = static_cast<double>(val);
    }
    double* attr_value = nullptr;
    try {
      attr_value = &paddle::get<double>(attr);
    } catch (paddle::bad_variant_access const& bad_get) {
      PADDLE_THROW(platform::errors::InvalidArgument(
          "Cannot get attribute (%s) by type double, its type is %s.",
          attr_name_,
          paddle::platform::demangle(attr.type().name())));
    }
    return attr_value;
  }

  // Attribute name, retained only for error reporting; the referenced
  // string must outlive this extractor.
  const std::string& attr_name_;
};

template <>
struct ExtractAttribute<std::vector<double>> {
explicit ExtractAttribute(const std::string& attr_name)
Expand Down
3 changes: 3 additions & 0 deletions paddle/fluid/framework/details/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,9 @@ if(WITH_PSCORE)
set(DISTRIBUTE_COMPILE_FLAGS
"-Wno-non-virtual-dtor -Wno-error=non-virtual-dtor -Wno-error=delete-non-virtual-dtor"
)
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 7.0)
set(DISTRIBUTE_COMPILE_FLAGS "${DISTRIBUTE_COMPILE_FLAGS} -faligned-new")
endif()
set_source_files_properties(
reduce_op_handle.cc PROPERTIES COMPILE_FLAGS ${DISTRIBUTE_COMPILE_FLAGS})
set_source_files_properties(
Expand Down
4 changes: 4 additions & 0 deletions paddle/fluid/framework/executor_cache.cc
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,10 @@ static ExecutionStrategy GetExecutionStrategy(const platform::Place &place) {
execution_strategy.num_threads_ = 1;
break;
}
case platform::DeviceType::NPU: {
execution_strategy.num_threads_ = 1;
break;
}
default:
PADDLE_THROW(platform::errors::Unavailable("Unsupported Device type %d.",
device_type));
Expand Down
2 changes: 2 additions & 0 deletions paddle/fluid/framework/framework.proto
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ enum AttrType {
FLOAT64S = 12;
VAR = 13;
VARS = 14;
FLOAT64 = 15;
}

// OpDesc describes an instance of a C++ framework::OperatorBase
Expand All @@ -62,6 +63,7 @@ message OpDesc {
repeated double float64s = 16;
optional string var_name = 17;
repeated string vars_name = 18;
optional double float64 = 19;
};

message Var {
Expand Down
8 changes: 8 additions & 0 deletions paddle/fluid/framework/infershape_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -482,6 +482,10 @@ CompatInferMetaContext BuildInferMetaContext(InferShapeContext* ctx,
infer_meta_context.EmplaceBackAttr(
phi::Scalar(PADDLE_GET_CONST(float, attr)));
break;
case framework::proto::AttrType::FLOAT64:
infer_meta_context.EmplaceBackAttr(
phi::Scalar(PADDLE_GET_CONST(double, attr)));
break;
case framework::proto::AttrType::INT:
infer_meta_context.EmplaceBackAttr(
phi::Scalar(PADDLE_GET_CONST(int, attr)));
Expand Down Expand Up @@ -651,6 +655,10 @@ CompatInferMetaContext BuildInferMetaContext(InferShapeContext* ctx,
case phi::AttributeType::FLOAT32:
infer_meta_context.EmplaceBackAttr(PADDLE_GET_CONST(float, attr));
break;
case phi::AttributeType::FLOAT64:
infer_meta_context.EmplaceBackAttr(
PADDLE_GET_CONST(double, attr));
break;
case phi::AttributeType::INT32:
infer_meta_context.EmplaceBackAttr(PADDLE_GET_CONST(int, attr));
break;
Expand Down
7 changes: 7 additions & 0 deletions paddle/fluid/framework/op_desc.cc
Original file line number Diff line number Diff line change
Expand Up @@ -668,6 +668,12 @@ void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
this->attrs_[name] = std::vector<float>();
break;
}
case proto::AttrType::FLOAT64S: {
VLOG(11) << "SetAttr: " << Type() << ", " << name
<< " from INTS to FLOAT64S";
this->attrs_[name] = std::vector<double>();
break;
}
case proto::AttrType::STRINGS: {
VLOG(11) << "SetAttr: " << Type() << ", " << name
<< " from INTS to STRINGS";
Expand Down Expand Up @@ -838,6 +844,7 @@ struct SetAttrDescVisitor {
mutable proto::OpDesc::Attr *attr_;
void operator()(int v) const { attr_->set_i(v); }
void operator()(float v) const { attr_->set_f(v); }
void operator()(double v) const { attr_->set_float64(v); }
void operator()(const std::string &v) const { attr_->set_s(v); }

// Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
Expand Down
8 changes: 8 additions & 0 deletions paddle/fluid/framework/operator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -2745,6 +2745,10 @@ void OperatorWithKernel::BuildPhiKernelContext(
phi_kernel_context->EmplaceBackAttr(std::move(
phi::Scalar(PADDLE_GET_CONST(float, attr_iter->second))));
break;
case proto::AttrType::FLOAT64:
phi_kernel_context->EmplaceBackAttr(std::move(
phi::Scalar(PADDLE_GET_CONST(double, attr_iter->second))));
break;
case proto::AttrType::INT:
phi_kernel_context->EmplaceBackAttr(std::move(
phi::Scalar(PADDLE_GET_CONST(int, attr_iter->second))));
Expand Down Expand Up @@ -2884,6 +2888,10 @@ void OperatorWithKernel::BuildPhiKernelContext(
phi_kernel_context->EmplaceBackAttr(
PADDLE_GET_CONST(float, attr_iter->second));
break;
case phi::AttributeType::FLOAT64:
phi_kernel_context->EmplaceBackAttr(
PADDLE_GET_CONST(double, attr_iter->second));
break;
case phi::AttributeType::INT32:
phi_kernel_context->EmplaceBackAttr(
PADDLE_GET_CONST(int, attr_iter->second));
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/type_defs.h
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ using Attribute = paddle::variant<paddle::blank,
std::vector<int64_t>,
std::vector<double>,
VarDesc*,
std::vector<VarDesc*>>;
std::vector<VarDesc*>,
double>;
using AttributeMap = std::unordered_map<std::string, Attribute>;

#ifdef PADDLE_WITH_ASCEND_CL
Expand Down
7 changes: 7 additions & 0 deletions paddle/fluid/imperative/prepared_operator.h
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,10 @@ void BuildDygraphPhiKernelContext(const phi::KernelSignature& kernel_signature,
kernel_ctx->EmplaceBackAttr(
std::move(phi::Scalar(PADDLE_GET_CONST(float, attr))));
break;
case framework::proto::AttrType::FLOAT64:
kernel_ctx->EmplaceBackAttr(
std::move(phi::Scalar(PADDLE_GET_CONST(double, attr))));
break;
case framework::proto::AttrType::INT:
kernel_ctx->EmplaceBackAttr(
std::move(phi::Scalar(PADDLE_GET_CONST(int, attr))));
Expand Down Expand Up @@ -549,6 +553,9 @@ void BuildDygraphPhiKernelContext(const phi::KernelSignature& kernel_signature,
case phi::AttributeType::FLOAT32:
kernel_ctx->EmplaceBackAttr(PADDLE_GET_CONST(float, attr));
break;
case phi::AttributeType::FLOAT64:
kernel_ctx->EmplaceBackAttr(PADDLE_GET_CONST(double, attr));
break;
case phi::AttributeType::INT32:
kernel_ctx->EmplaceBackAttr(PADDLE_GET_CONST(int, attr));
break;
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/operators/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ set(OP_HEADER_DEPS ${OP_HEADER_DEPS} phi phi_api_utils gather_scatter_kernel bac
register_operators(EXCLUDES py_layer_op py_func_op warpctc_op dgc_op load_combine_op lstm_op run_program_op eye_op quantize_linear_op
recurrent_op save_combine_op sparse_attention_op sync_batch_norm_op ${OP_MKL_DEPS} DEPS ${OP_HEADER_DEPS})

op_library(run_program_op SRCS run_program_op.cc run_program_op.cu.cc DEPS executor_cache ${OP_HEADER_DEPS})
op_library(run_program_op SRCS run_program_op.cc run_program_op.cu.cc run_program_op_npu.cc DEPS executor_cache ${OP_HEADER_DEPS})
target_link_libraries(run_program_op cuda_graph_with_memory_pool)
op_library(quantize_linear_op DEPS phi)
op_library(save_combine_op DEPS string_array)
Expand Down
40 changes: 34 additions & 6 deletions paddle/fluid/operators/conv_base_helper.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,13 @@ struct SearchAlgorithm {};
template <typename AlgoT>
struct SearchResult {
SearchResult() {}
explicit SearchResult(const phi::autotune::DnnNode& node)
: algo(static_cast<AlgoT>(node.algo)),
workspace_size(node.workspace_size) {}

explicit SearchResult(AlgoT a) : algo(a) {}
explicit SearchResult(AlgoT a, float t, size_t size)
: algo(a), time(t), workspace_size(size) {}

AlgoT algo = static_cast<AlgoT>(0);
float time = -1.f;
Expand Down Expand Up @@ -76,28 +82,50 @@ struct ConvArgsBase {
// dilations
std::vector<int> d;

// groups
int group;

// data format
DataLayout data_layout;

ConvArgsBase(const framework::Tensor* x,
const framework::Tensor* w,
const framework::Tensor* o,
const std::vector<int> s,
const std::vector<int> p,
const std::vector<int> d,
DataT dtype)
: x(x), w(w), o(o), s(s), p(p), d(d), cudnn_dtype(dtype) {}
DataT dtype,
int g,
DataLayout layout)
: x(x),
w(w),
o(o),
s(s),
p(p),
d(d),
cudnn_dtype(dtype),
group(g),
data_layout(layout) {}

template <typename T>
size_t GetCacheKey() const {
phi::autotune::ConvCacheKey Convert2ConvCacheKey() const {
auto x_shape = phi::vectorize(x->dims());
auto w_shape = phi::vectorize(w->dims());
VLOG(10) << "[ConvArgs] x_dims=" << x_shape << ", w_dims=" << w_shape
<< ", strides=" << s << ", paddings=" << p << ", dilations=" << d;
return phi::autotune::ConvKey(
<< ", strides=" << s << ", paddings=" << p << ", dilations=" << d
<< ",data= " << paddle::experimental::CppTypeToDataType<T>::Type()
<< ", group=" << group
<< ", data layout=" << static_cast<int64_t>(data_layout);

return phi::autotune::ConvCacheKey(
x_shape,
w_shape,
p,
s,
d,
paddle::experimental::CppTypeToDataType<T>::Type());
paddle::experimental::CppTypeToDataType<T>::Type(),
group,
static_cast<int64_t>(data_layout));
}
};

Expand Down

0 comments on commit b516f08

Please sign in to comment.