Skip to content

Commit

Permalink
Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into…
Browse files Browse the repository at this point in the history
… optimizer_base
  • Loading branch information
longranger2 committed Aug 11, 2023
2 parents 2f80f80 + bfc6480 commit 0cc754a
Show file tree
Hide file tree
Showing 455 changed files with 9,789 additions and 2,404 deletions.
8 changes: 2 additions & 6 deletions cmake/cinn.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ if(WITH_MKL)
add_definitions(-DCINN_WITH_MKL_CBLAS)
endif()
if(WITH_MKLDNN)
add_definitions(-DCINN_WITH_MKLDNN)
add_definitions(-DCINN_WITH_DNNL)
endif()

if(WITH_GPU)
Expand Down Expand Up @@ -326,12 +326,8 @@ set(CINN_LIB "${CINN_LIB_LOCATION}/${CINN_LIB_NAME}")
# Add CINN's dependencies header files
######################################

# Add absl
set(ABSL_INCLUDE_DIR "${CMAKE_BINARY_DIR}/dist/third_party/absl/include")
include_directories(${ABSL_INCLUDE_DIR})

# Add isl
set(ISL_INCLUDE_DIR "${CMAKE_BINARY_DIR}/dist/third_party/isl/include")
set(ISL_INCLUDE_DIR "${CMAKE_BINARY_DIR}/third_party/install/isl/include")
include_directories(${ISL_INCLUDE_DIR})

# Add LLVM
Expand Down
27 changes: 22 additions & 5 deletions cmake/cinn/external/absl.cmake
Original file line number Diff line number Diff line change
@@ -1,13 +1,31 @@
include(ExternalProject)

set(ABSL_SOURCES_DIR ${THIRD_PARTY_PATH}/absl)
set(ABSL_SOURCES_DIR ${PADDLE_SOURCE_DIR}/third_party/absl)
set(ABSL_INSTALL_DIR ${THIRD_PARTY_PATH}/install/absl)

set(ABSL_PREFIX_DIR ${THIRD_PARTY_PATH}/absl)
set(ABSL_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})

set(ABSL_REPOSITORY "https://github.com/abseil/abseil-cpp.git")
set(ABSL_TAG "20210324.2")

if(NOT EXISTS ${ABSL_SOURCES_DIR})
message(
STATUS "Download absl source from ${ABSL_REPOSITORY} to ABSL_SOURCES_DIR")
execute_process(COMMAND ${GIT_EXECUTABLE} clone -b ${ABSL_TAG}
${ABSL_REPOSITORY} ${ABSL_SOURCES_DIR})
else()
# check git tag
execute_process(
COMMAND ${GIT_EXECUTABLE} -C ${ABSL_SOURCES_DIR} describe --tags
OUTPUT_VARIABLE CURRENT_TAG
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(NOT ${CURRENT_TAG} STREQUAL ${ABSL_TAG})
message(STATUS "Checkout absl to ${ABSL_TAG}")
execute_process(COMMAND ${GIT_EXECUTABLE} -C ${ABSL_SOURCES_DIR} checkout
-q ${ABSL_TAG})
endif()
endif()

set(OPTIONAL_ARGS
"-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
"-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
Expand All @@ -22,9 +40,8 @@ ExternalProject_Add(
external_absl
${EXTERNAL_PROJECT_LOG_ARGS}
DEPENDS gflags
GIT_REPOSITORY ${ABSL_REPOSITORY}
GIT_TAG ${ABSL_TAG}
PREFIX ${ABSL_SOURCES_DIR}
PREFIX ${ABSL_PREFIX_DIR}
SOURCE_DIR ${ABSL_SOURCES_DIR}
UPDATE_COMMAND ""
CMAKE_ARGS ${OPTIONAL_ARGS}
-DCMAKE_INSTALL_PREFIX=${ABSL_INSTALL_DIR}
Expand Down
55 changes: 46 additions & 9 deletions cmake/cinn/external/isl.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -7,25 +7,62 @@ include(ExternalProject)
# static build
# CPPFLAGS="-fPIC -DPIC" ./configure --with-gmp-prefix=<gmp-install-path> --with-clang-prefix=<llvm-install-path> --enable-shared=no --enable-static=yes

set(ISL_FILE
"isl-6a1760fe.tar.gz"
CACHE STRING "" FORCE)
set(ISL_DOWNLOAD_URL
https://paddle-inference-dist.bj.bcebos.com/CINN/isl-6a1760fe.tar.gz)
set(ISL_MD5 fff10083fb79d394b8a7b7b2089f6183)
"https://paddle-inference-dist.bj.bcebos.com/CINN/${ISL_FILE}")
set(ISL_URL_MD5 fff10083fb79d394b8a7b7b2089f6183)
set(ISL_DOWNLOAD_DIR ${PADDLE_SOURCE_DIR}/third_party/isl)
set(ISL_PREFIX_DIR ${THIRD_PARTY_PATH}/isl)
set(ISL_INSTALL_DIR ${THIRD_PARTY_PATH}/install/isl)

function(download_isl)
message(
STATUS "Downloading ${ISL_DOWNLOAD_URL} to ${ISL_DOWNLOAD_DIR}/${ISL_FILE}")
file(
DOWNLOAD ${ISL_DOWNLOAD_URL} ${ISL_DOWNLOAD_DIR}/${ISL_FILE}
EXPECTED_MD5 ${ISL_URL_MD5}
STATUS ERR)
if(ERR EQUAL 0)
message(STATUS "Download ${ISL_FILE} success")
else()
message(
FATAL_ERROR
"Download failed, error: ${ERR}\n You can try downloading ${ISL_FILE} again"
)
endif()
endfunction()

# Download and check isl.
if(EXISTS ${ISL_DOWNLOAD_DIR}/${ISL_FILE})
file(MD5 ${ISL_DOWNLOAD_DIR}/${ISL_FILE} ISL_MD5)
if(NOT ISL_MD5 STREQUAL ISL_URL_MD5)
# clean build file
file(REMOVE_RECURSE ${ISL_PREFIX_DIR})
file(REMOVE_RECURSE ${ISL_INSTALL_DIR})
download_isl()
endif()
else()
download_isl()
endif()

ExternalProject_Add(
external_isl
${EXTERNAL_PROJECT_LOG_ARGS}
URL ${ISL_DOWNLOAD_URL}
URL_MD5 ${ISL_MD5}
PREFIX ${THIRD_PARTY_PATH}/isl
SOURCE_DIR ${THIRD_PARTY_PATH}/install/isl
URL ${ISL_DOWNLOAD_DIR}/${ISL_FILE}
URL_MD5 ${ISL_URL_MD5}
DOWNLOAD_DIR ${ISL_DOWNLOAD_DIR}
PREFIX ${ISL_PREFIX_DIR}
SOURCE_DIR ${ISL_INSTALL_DIR}
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
UPDATE_COMMAND ""
INSTALL_COMMAND ""
BUILD_BYPRODUCTS ${THIRD_PARTY_PATH}/install/isl/lib/libisl.a)
BUILD_BYPRODUCTS ${ISL_INSTALL_DIR}/lib/libisl.a)

add_library(isl STATIC IMPORTED GLOBAL)
set_property(TARGET isl PROPERTY IMPORTED_LOCATION
${THIRD_PARTY_PATH}/install/isl/lib/libisl.a)
${ISL_INSTALL_DIR}/lib/libisl.a)
add_dependencies(isl external_isl)
include_directories(${THIRD_PARTY_PATH}/install/isl/include)
include_directories(${ISL_INSTALL_DIR}/include)
2 changes: 1 addition & 1 deletion cmake/external/mkldnn.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ ExternalProject_Add(
BUILD_BYPRODUCTS ${BUILD_BYPRODUCTS_ARGS})

message(STATUS "MKLDNN library: ${MKLDNN_LIB}")
add_definitions(-DPADDLE_WITH_MKLDNN)
add_definitions(-DPADDLE_WITH_DNNL)
# copy the real so.0 lib to install dir
# it can be directly contained in wheel or capi
if(WIN32)
Expand Down
2 changes: 1 addition & 1 deletion cmake/external/xpu.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ set(XPU_XFT_LIB_NAME "libxft.so")
set(XPU_XPTI_LIB_NAME "libxpti.so")

if(NOT DEFINED XPU_BASE_DATE)
set(XPU_BASE_DATE "20230807")
set(XPU_BASE_DATE "20230810")
endif()
set(XPU_XCCL_BASE_VERSION "1.0.53.6")
if(NOT DEFINED XPU_XFT_BASE_VERSION)
Expand Down
2 changes: 1 addition & 1 deletion cmake/third_party.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ if(WITH_CINN)
add_definitions(-DCINN_WITH_MKL_CBLAS)
endif()
if(WITH_MKLDNN)
add_definitions(-DCINN_WITH_MKLDNN)
add_definitions(-DCINN_WITH_DNNL)
endif()
include(cmake/cinn/version.cmake)
if(NOT EXISTS ${CMAKE_BINARY_DIR}/cmake/cinn/config.cmake)
Expand Down
2 changes: 1 addition & 1 deletion paddle/cinn/hlir/framework/instruction.cc
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,7 @@ void Instruction::Run(
// }
}

std::string Instruction::DumpInstruction() {
std::string Instruction::DumpInstruction() const {
std::stringstream ss;
ss << "Instruction {" << std::endl;
for (size_t i = 0; i < fn_names_.size(); ++i) {
Expand Down
12 changes: 8 additions & 4 deletions paddle/cinn/hlir/framework/instruction.h
Original file line number Diff line number Diff line change
Expand Up @@ -132,13 +132,17 @@ class Instruction {

int size() { return fn_ptrs_.size(); }

std::string DumpInstruction();
std::string DumpInstruction() const;

std::vector<std::vector<std::string>> GetInArgs() { return in_args_; }
std::vector<std::vector<std::string>> GetOutArgs() { return out_args_; }
const std::vector<std::vector<std::string>>& GetInArgs() const {
return in_args_;
}
const std::vector<std::vector<std::string>>& GetOutArgs() const {
return out_args_;
}
void ClearInArgs() { in_args_.clear(); }
void ClearOutArgs() { out_args_.clear(); }
std::vector<std::string> GetFnNames() { return fn_names_; }
const std::vector<std::string>& GetFnNames() const { return fn_names_; }
void AddInArgs(const std::vector<std::string>& in_args) {
in_args_.push_back(in_args);
}
Expand Down
6 changes: 3 additions & 3 deletions paddle/cinn/hlir/framework/program.cc
Original file line number Diff line number Diff line change
Expand Up @@ -140,9 +140,9 @@ void Program::Export(const std::vector<std::string>& persistent_vars,
int instplaceholder = writeplaceholder(4 * 3, insnum, f);
int findex = 0;
for (auto& ins : instrs_) {
auto in_args = ins->GetInArgs();
auto out_args = ins->GetOutArgs();
auto fn_names = ins->GetFnNames();
auto& in_args = ins->GetInArgs();
auto& out_args = ins->GetOutArgs();
auto& fn_names = ins->GetFnNames();
for (int i = 0; i < fn_names.size(); i++, findex++) {
std::vector<std::string> all_args(in_args[i].begin(), in_args[i].end());
all_args.insert(
Expand Down
2 changes: 1 addition & 1 deletion paddle/cinn/hlir/op/custom_call.cc
Original file line number Diff line number Diff line change
Expand Up @@ -996,7 +996,7 @@ bool RegisteryCustomCallArgsFunc() {
CustomCallArgsForCudnnPoolBackward);
#endif

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL

#endif

Expand Down
4 changes: 2 additions & 2 deletions paddle/cinn/hlir/op/nn.cc
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ std::shared_ptr<OpStrategy> StrategyForConv2d(
tensor_name,
target);
} else {
#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
out = pe::Conv2d_NCHW_MKLDNN(A.as_tensor_ref(),
B.as_tensor_ref(),
padding[0],
Expand Down Expand Up @@ -1897,7 +1897,7 @@ std::shared_ptr<OpStrategy> StrategyForSoftmax(
std::string tensor_name =
pack_args[pack_args.size() - 1].operator std::string();

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
if (use_mkldnn) {
out = pe::SoftmaxMKLDNN(A, new_axis, tensor_name);
} else {
Expand Down
4 changes: 2 additions & 2 deletions paddle/cinn/hlir/pe/nn.cc
Original file line number Diff line number Diff line change
Expand Up @@ -646,7 +646,7 @@ std::vector<ir::Tensor> Conv2d_NCHWc(const ir::Tensor &input,
return {packed_out, input_pad};
}

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
std::vector<ir::Tensor> Conv2d_NCHW_MKLDNN(const ir::Tensor &input,
const ir::Tensor &weights,
int pad_h,
Expand Down Expand Up @@ -1014,7 +1014,7 @@ std::vector<ir::Tensor> Softmax(const ir::Tensor &A,
return {out, temp};
}

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
std::vector<ir::Tensor> SoftmaxMKLDNN(const ir::Tensor &A,
int axis,
const std::string &output_name) {
Expand Down
4 changes: 2 additions & 2 deletions paddle/cinn/hlir/pe/nn.h
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ std::vector<ir::Tensor> Conv2d_NCHWc(
const std::string &output_name = UniqName("T_Conv2d_NCHWc_out"),
const common::Target &target = common::DefaultHostTarget());

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
std::vector<ir::Tensor> Conv2d_NCHW_MKLDNN(
const ir::Tensor &input,
const ir::Tensor &weights,
Expand Down Expand Up @@ -332,7 +332,7 @@ std::vector<ir::Tensor> Softmax(
int axis = -1,
const std::string &output_name = UniqName("T_softmax_out"));

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
std::vector<ir::Tensor> SoftmaxMKLDNN(
const ir::Tensor &A,
int axis = -1,
Expand Down
2 changes: 1 addition & 1 deletion paddle/cinn/runtime/cpu/mkldnn_math.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
#pragma once
#include "paddle/cinn/runtime/cinn_runtime.h"

#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
#include "dnnl.hpp" // NOLINT
#endif

Expand Down
2 changes: 1 addition & 1 deletion paddle/cinn/runtime/cpu/use_extern_funcs.h
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ CINN_USE_REGISTER(host_intrinsics)
#ifdef CINN_WITH_MKL_CBLAS
CINN_USE_REGISTER(mkl_math)
CINN_USE_REGISTER(cinn_cpu_mkl)
#ifdef CINN_WITH_MKLDNN
#ifdef CINN_WITH_DNNL
CINN_USE_REGISTER(cinn_cpu_mkldnn)
#endif
#endif
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ void ProcessGroupCustom::CreateCustomManagerCache(
phi::DeviceGuard guard(places[i]);
ccl_comms[i] = CustomCCLCommManager::Create(
device_type, GetSize(), GetRank(), &ccl_id, new phi::ccl::CCLComm);
dev_ctx[i].reset(new CustomDeviceContext(places[i]));
dev_ctx[i] = std::make_unique<CustomDeviceContext>(places[i]);
dev_ctx[i]->SetAllocator(
&(phi::DeviceContextPool::Instance().Get(places[i])->GetAllocator()));
dev_ctx[i]->SetHostAllocator(&(
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/distributed/collective/process_group_nccl.cc
Original file line number Diff line number Diff line change
Expand Up @@ -625,7 +625,7 @@ void ProcessGroupNCCL::CreateNCCLManagerCache(
for (size_t i = 0; i < places.size(); ++i) {
platform::CUDADeviceGuard guard(places[i]);

dev_ctx[i].reset(new phi::GPUContext(places[i]));
dev_ctx[i] = std::make_unique<phi::GPUContext>(places[i]);
ncclComm_t nccl_comm;
NCCL_CHECK(phi::dynload::ncclCommInitRank(
&nccl_comm, GetSize(), nccl_id, GetRank()));
Expand Down
8 changes: 4 additions & 4 deletions paddle/fluid/distributed/fleet_executor/dist_model.cc
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ void DistModel::InsertCommOp(std::string tmp_var_name,
}

bool DistModel::PrepareScope() {
scope_.reset(new framework::Scope());
scope_ = std::make_unique<framework::Scope>();
return true;
}

Expand Down Expand Up @@ -412,7 +412,7 @@ bool DistModel::LoadProgram() {
fin.close();
program_proto.ParseFromString(pb_content);
VLOG(5) << pb_content;
program_.reset(new framework::ProgramDesc(program_proto));
program_ = std::make_unique<framework::ProgramDesc>(program_proto);
return true;
}

Expand Down Expand Up @@ -469,7 +469,7 @@ bool DistModel::LoadParameters() {
}

bool DistModel::PrepareFleetExe() {
task_node_.reset(new TaskNode(program_.get(), config_.local_rank));
task_node_ = std::make_unique<TaskNode>(program_.get(), config_.local_rank);
// With auto cut, there is no concept of pp, no need to add dependency.
task_node_->SetType("Compute");
task_node_->Init();
Expand All @@ -487,7 +487,7 @@ bool DistModel::PrepareFleetExe() {
}
id_to_rank.insert({i, i});
}
fleet_exe.reset(new FleetExecutor(executor_desc_));
fleet_exe = std::make_unique<FleetExecutor>(executor_desc_);
fleet_exe->Init(carrier_id_,
*(program_.get()),
scope_.get(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -796,8 +796,8 @@ void AsyncCommunicator::InitImpl(const RpcCtxMap &send_varname_to_ctx,
send_varname_to_ctx_ = std::move(send_varname_to_ctx);
recv_varname_to_ctx_ = std::move(recv_varname_to_ctx);
recv_scope_ = std::move(recv_scope);
send_scope_.reset(new Scope());
xpu_temp_scope_.reset(new Scope());
send_scope_ = std::make_unique<Scope>();
xpu_temp_scope_ = std::make_unique<Scope>();
for (auto &iter : send_varname_to_ctx_) {
auto &ctx = iter.second;
auto &varnames = ctx.origin_varnames;
Expand All @@ -807,7 +807,7 @@ void AsyncCommunicator::InitImpl(const RpcCtxMap &send_varname_to_ctx,
send_queue_size_);
}
}
send_threadpool_.reset(new ::ThreadPool(thread_pool_size_));
send_threadpool_ = std::make_unique<::ThreadPool>(thread_pool_size_);
}

AsyncCommunicator::~AsyncCommunicator() {
Expand Down Expand Up @@ -1517,7 +1517,7 @@ void FLCommunicator::InitBrpcClient(
// before, but no need for Coordinator
}
if (coordinator_client_ptr_ == nullptr) {
coordinator_client_ptr_.reset(new CoordinatorClient);
coordinator_client_ptr_ = std::make_unique<CoordinatorClient>();
}
int16_t servers = host_sign_list.size();
coordinator_client_ptr_->_env = &ps_env_;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ int32_t MemorySparseGeoTable::Initialize() {
shards_task.reset(new ::ThreadPool(1));
}

_local_shards.reset(new shard_type[_task_pool_size]);
_local_shards.reset(new shard_type[_task_pool_size]); // NOLINT
return 0;
}

Expand Down
Loading

0 comments on commit 0cc754a

Please sign in to comment.