From 0dae79a9f73936c4d0bfbd8073de1840e310e7ad Mon Sep 17 00:00:00 2001
From: WangZhen <23097963+0x45f@users.noreply.github.com>
Date: Wed, 27 Jul 2022 14:03:17 +0800
Subject: [PATCH] [JitLayer]Remove include fluid head files in JitLayer (#44597)

* Remove include fluid head files in JitLayer
* Format code
* Remove const to fix ci error
* Fix param error
* Polish jit layer include and cp some headers to python/include
* Fix comment
---
 paddle/fluid/jit/all.h               | 20 ++++++++++++++++++++
 paddle/fluid/jit/base_function.h     |  1 -
 paddle/fluid/jit/compilation_unit.cc |  2 ++
 paddle/fluid/jit/compilation_unit.h  |  5 +++--
 paddle/fluid/jit/function_schema.cc  | 14 +++++++-------
 paddle/fluid/jit/function_schema.h   | 12 +++++++-----
 paddle/fluid/jit/function_utils.cc   |  4 +++-
 paddle/fluid/jit/function_utils.h    | 13 +++++++++----
 paddle/fluid/jit/layer.cc            | 20 ++++++++++++--------
 paddle/fluid/jit/layer.h             | 26 +++++++++++++++++---------
 paddle/fluid/jit/serializer.cc       |  8 ++++++--
 paddle/fluid/jit/serializer.h        | 20 +++++++++++++++-----
 paddle/fluid/jit/serializer_utils.cc |  5 +++++
 paddle/fluid/jit/serializer_utils.h  |  9 +++++++--
 python/setup.py.in                   | 12 ++++++++++--
 15 files changed, 123 insertions(+), 48 deletions(-)
 create mode 100644 paddle/fluid/jit/all.h

diff --git a/paddle/fluid/jit/all.h b/paddle/fluid/jit/all.h
new file mode 100644
index 0000000000000..5a571a72a2824
--- /dev/null
+++ b/paddle/fluid/jit/all.h
@@ -0,0 +1,20 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+
+#include "base_function.h"
+#include "layer.h"
+#include "serializer.h"
+#include "serializer_utils.h"
diff --git a/paddle/fluid/jit/base_function.h b/paddle/fluid/jit/base_function.h
index df774d8fd84c7..50dadaf4ae227 100644
--- a/paddle/fluid/jit/base_function.h
+++ b/paddle/fluid/jit/base_function.h
@@ -15,7 +15,6 @@
 #pragma once
 
 #include "paddle/phi/api/include/tensor.h"
-#include "paddle/phi/core/dense_tensor.h"
 
 namespace paddle {
 namespace jit {
diff --git a/paddle/fluid/jit/compilation_unit.cc b/paddle/fluid/jit/compilation_unit.cc
index 60d42d045b0e3..5a434fba176d3 100644
--- a/paddle/fluid/jit/compilation_unit.cc
+++ b/paddle/fluid/jit/compilation_unit.cc
@@ -16,6 +16,8 @@
 
 #include "paddle/phi/core/enforce.h"
 
+#include "paddle/fluid/jit/base_function.h"
+
 namespace paddle {
 namespace jit {
diff --git a/paddle/fluid/jit/compilation_unit.h b/paddle/fluid/jit/compilation_unit.h
index 45a771b649401..535e92fe88473 100644
--- a/paddle/fluid/jit/compilation_unit.h
+++ b/paddle/fluid/jit/compilation_unit.h
@@ -14,13 +14,14 @@
 #pragma once
 
+#include <memory>
 #include <string>
 #include <unordered_map>
-
-#include "paddle/fluid/jit/base_function.h"
+#include <vector>
 
 namespace paddle {
 namespace jit {
+class BaseFunction;
 
 using Name2FunctionMap =
     std::unordered_map<std::string, std::shared_ptr<BaseFunction>>;
diff --git a/paddle/fluid/jit/function_schema.cc b/paddle/fluid/jit/function_schema.cc
index 20cbcfdbd1c88..8150d3b2e7589 100644
--- a/paddle/fluid/jit/function_schema.cc
+++ b/paddle/fluid/jit/function_schema.cc
@@ -14,6 +14,7 @@
 
 #include "paddle/fluid/jit/function_schema.h"
 
+#include "paddle/fluid/framework/program_desc.h"
 #include "paddle/phi/core/enforce.h"
 
 #include "paddle/fluid/jit/function_utils.h"
@@ -52,14 +53,13 @@ void FunctionSchema::AddOutputArg(const std::string& name) {
 FunctionInfo::FunctionInfo(const std::string& func_name,
                            const std::vector<std::string>& param_names,
                            const framework::ProgramDesc& program_desc)
-    : func_name_(func_name),
-      param_names_(param_names),
-      program_desc_(program_desc) {
+    : func_name_(func_name), param_names_(param_names) {
+  program_desc_.reset(new framework::ProgramDesc(program_desc));
   // Parse FunctionSchema
-  for (auto& in_name : program_desc_.GetFeedTargetNames()) {
+  for (auto& in_name : program_desc_->GetFeedTargetNames()) {
     schema_.AddInputArg(in_name);
   }
-  for (auto& out_name : program_desc_.GetFetchTargetNames()) {
+  for (auto& out_name : program_desc_->GetFetchTargetNames()) {
     schema_.AddOutputArg(out_name);
   }
 }
@@ -67,7 +67,7 @@ FunctionInfo::FunctionInfo(const std::string& func_name,
 const std::string& FunctionInfo::FunctionName() const { return func_name_; }
 
 const framework::ProgramDesc& FunctionInfo::ProgramDesc() const {
-  return program_desc_;
+  return *program_desc_.get();
 }
 
 const std::vector<std::string>& FunctionInfo::ParamNames() const {
@@ -83,7 +83,7 @@ const std::vector<std::string> FunctionInfo::OutputArgNames() const {
 }
 
 void FunctionInfo::RemoveDescFeedFetch() {
-  utils::RemoveFeedFetch(&program_desc_);
+  utils::RemoveFeedFetch(program_desc_.get());
 }
 
 }  // namespace jit
diff --git a/paddle/fluid/jit/function_schema.h b/paddle/fluid/jit/function_schema.h
index 5dcea8517e40e..9f593dd7eee24 100644
--- a/paddle/fluid/jit/function_schema.h
+++ b/paddle/fluid/jit/function_schema.h
@@ -14,15 +14,17 @@
 #pragma once
 
+#include <memory>
 #include <string>
 #include <vector>
 
-#include "paddle/fluid/framework/program_desc.h"
-#include "paddle/fluid/framework/variable.h"
-
 namespace paddle {
+
+namespace framework {
+class ProgramDesc;
+}  // namespace framework
+
 namespace jit {
-using Variable = paddle::framework::Variable;
 
 class Argument {
  public:
@@ -75,7 +77,7 @@ class FunctionInfo {
  private:
   std::string func_name_;
   std::vector<std::string> param_names_;
-  framework::ProgramDesc program_desc_;
+  std::shared_ptr<framework::ProgramDesc> program_desc_;
   FunctionSchema schema_;
 };
diff --git a/paddle/fluid/jit/function_utils.cc b/paddle/fluid/jit/function_utils.cc
index a6da061de99dc..83da12d2652a3 100644
--- a/paddle/fluid/jit/function_utils.cc
+++ b/paddle/fluid/jit/function_utils.cc
@@ -15,7 +15,9 @@
 #include "paddle/fluid/jit/function_utils.h"
 
 #include "paddle/fluid/framework/program_desc.h"
+#include "paddle/fluid/framework/scope.h"
 #include "paddle/fluid/framework/var_desc.h"
+#include "paddle/fluid/framework/variable.h"
 #include "paddle/phi/core/enforce.h"
 
 namespace paddle {
@@ -75,7 +77,7 @@ void ShareParamsIntoScope(const std::vector<std::string> &param_names,
   for (size_t i = 0; i < param_names.size(); ++i) {
     std::string name = param_names[i];
     auto &param = params_dict.find(name)->second;
-    auto &dense_tensor = param.Get<DenseTensor>();
+    auto &dense_tensor = param->Get<DenseTensor>();
     VLOG(3) << "share into scope: " << name;
     auto *var = scope->Var(name);
     auto *dst_tensor = var->GetMutable<DenseTensor>();
diff --git a/paddle/fluid/jit/function_utils.h b/paddle/fluid/jit/function_utils.h
index ba1eaf7308be9..90e2e4b7f798f 100644
--- a/paddle/fluid/jit/function_utils.h
+++ b/paddle/fluid/jit/function_utils.h
@@ -18,18 +18,23 @@
 #include <unordered_map>
 #include <vector>
 
-#include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/framework/variable.h"
 #include "paddle/phi/api/include/tensor.h"
 #include "paddle/phi/common/place.h"
-#include "paddle/phi/core/dense_tensor.h"
 
 #include "paddle/fluid/jit/function_schema.h"
 
 namespace paddle {
+
+namespace framework {
+class Variable;
+class ProgramDesc;
+class Scope;
+}  // namespace framework
+
 namespace jit {
 using Variable = paddle::framework::Variable;
-using Name2VariableMap = std::unordered_map<std::string, Variable>;
+using Name2VariableMap =
+    std::unordered_map<std::string, std::shared_ptr<Variable>>;
 using DenseTensor = phi::DenseTensor;
 using Tensor = paddle::experimental::Tensor;
diff --git a/paddle/fluid/jit/layer.cc b/paddle/fluid/jit/layer.cc
index f5985d71b0347..0e981bc45957f 100644
--- a/paddle/fluid/jit/layer.cc
+++ b/paddle/fluid/jit/layer.cc
@@ -14,17 +14,21 @@
 
 #include "paddle/fluid/jit/layer.h"
 
+#include "paddle/fluid/framework/variable.h"
+
+#include "paddle/fluid/jit/base_function.h"
+#include "paddle/fluid/jit/compilation_unit.h"
+#include "paddle/fluid/jit/function_schema.h"
+
 namespace paddle {
 namespace jit {
-Layer::Layer(const std::vector<std::shared_ptr<FunctionInfo>>& infos,
-             const Name2VariableMap& params_dict,
-             const phi::Place& place)
+Layer::Layer(const Name2VariableMap& params_dict, const phi::Place& place)
     : params_dict_(params_dict) {
-  VLOG(3) << "infos size: " << infos.size();
+  unit_.reset(new CompilationUnit());
 }
 
 std::shared_ptr<BaseFunction> Layer::Function(const std::string& name) const {
-  return unit_.Function(name);
+  return unit_->Function(name);
 }
 
 std::vector<Tensor> Layer::forward(const std::vector<Tensor>& inputs) {
@@ -42,15 +46,15 @@ void Layer::to(const phi::Place& place) {}
 
 void Layer::SetFunction(const std::string& name,
                         const std::shared_ptr<BaseFunction>& function) {
-  unit_.SetFunction(name, function);
+  unit_->SetFunction(name, function);
 }
 
 std::vector<std::string> Layer::FunctionNames() const {
-  return unit_.FunctionNames();
+  return unit_->FunctionNames();
 }
 
 const Name2FunctionMap& Layer::FunctionMap() const {
-  return unit_.FunctionMap();
+  return unit_->FunctionMap();
 }
 
 }  // namespace jit
diff --git a/paddle/fluid/jit/layer.h b/paddle/fluid/jit/layer.h
index ee75881fc3156..b2efa77fedf52 100644
--- a/paddle/fluid/jit/layer.h
+++ b/paddle/fluid/jit/layer.h
@@ -18,23 +18,31 @@
 #include <string>
 #include <vector>
 
-#include "paddle/fluid/framework/variable.h"
+#include "paddle/phi/api/include/tensor.h"
 #include "paddle/phi/common/place.h"
 
-#include "paddle/fluid/jit/base_function.h"
-#include "paddle/fluid/jit/compilation_unit.h"
-#include "paddle/fluid/jit/function_schema.h"
+#include "base_function.h"
 
 namespace paddle {
+
+namespace framework {
+class Variable;
+}  // namespace framework
+
 namespace jit {
+class CompilationUnit;
+
+using DenseTensor = phi::DenseTensor;
+using Tensor = paddle::experimental::Tensor;
 using Variable = paddle::framework::Variable;
-using Name2VariableMap = std::unordered_map<std::string, Variable>;
+using Name2VariableMap =
+    std::unordered_map<std::string, std::shared_ptr<Variable>>;
+using Name2FunctionMap =
+    std::unordered_map<std::string, std::shared_ptr<BaseFunction>>;
 
 class Layer {
  public:
-  Layer(const std::vector<std::shared_ptr<FunctionInfo>>& infos,
-        const Name2VariableMap& params_dict,
-        const phi::Place& place);
+  Layer(const Name2VariableMap& params_dict, const phi::Place& place);
 
   std::shared_ptr<BaseFunction> Function(const std::string& name) const;
 
@@ -56,7 +64,7 @@ class Layer {
  private:
   Name2VariableMap params_dict_;
   Name2VariableMap attrs_dict_;
-  CompilationUnit unit_;
+  std::shared_ptr<CompilationUnit> unit_;
 };
 
 }  // namespace jit
diff --git a/paddle/fluid/jit/serializer.cc b/paddle/fluid/jit/serializer.cc
index 2dee9ee879a22..c24995f711826 100644
--- a/paddle/fluid/jit/serializer.cc
+++ b/paddle/fluid/jit/serializer.cc
@@ -16,10 +16,14 @@
 
 #include <set>
 
+#include "paddle/fluid/framework/var_desc.h"
+#include "paddle/fluid/framework/variable.h"
 #include "paddle/fluid/platform/device_context.h"
 
 #include "paddle/fluid/jit/executor_function.h"
+#include "paddle/fluid/jit/layer.h"
 #include "paddle/fluid/jit/pe_function.h"
+#include "paddle/fluid/jit/property.h"
 #include "paddle/fluid/jit/serializer_utils.h"
 
 DECLARE_string(jit_engine_type);
@@ -55,7 +59,7 @@ Layer Deserializer::operator()(const std::string& path,
   ReadTensorData(path + PDPARAMS_SUFFIX, param_names_set, place, &params_dict);
   // ReadAttributeData();
 
-  Layer layer = Layer(infos, params_dict, place);
+  Layer layer = Layer(params_dict, place);
 
   for (auto& info : infos) {
     if (FLAGS_jit_engine_type == "Executor") {
@@ -90,7 +94,7 @@ void Deserializer::ReadTensorData(const std::string& file_name,
     // TODO(dev): Support framework::Vocab
     DenseTensor* dense_tesnor = v.GetMutable<DenseTensor>();
     framework::DeserializeFromStream(fin, dense_tesnor, dev_ctx);
-    (*params_dict)[*it] = v;
+    (*params_dict)[*it] = std::make_shared<Variable>(v);
   }
 }
diff --git a/paddle/fluid/jit/serializer.h b/paddle/fluid/jit/serializer.h
index bdc3b81d55193..188239f469a57 100644
--- a/paddle/fluid/jit/serializer.h
+++ b/paddle/fluid/jit/serializer.h
@@ -14,16 +14,26 @@
 #pragma once
 
+#include <memory>
+#include <set>
 #include <string>
+#include <unordered_map>
 
-#include "paddle/fluid/framework/var_desc.h"
-#include "paddle/fluid/framework/variable.h"
-#include "paddle/fluid/jit/property.h"
-
-#include "paddle/fluid/jit/layer.h"
+#include "paddle/phi/common/place.h"
 
 namespace paddle {
+
+namespace framework {
+class Variable;
+class ProgramDesc;
+}  // namespace framework
+
 namespace jit {
+class Layer;
+using Variable = paddle::framework::Variable;
+using Name2VariableMap =
+    std::unordered_map<std::string, std::shared_ptr<Variable>>;
+
 // Export Layer into local disk
 class Serializer {
  public:
diff --git a/paddle/fluid/jit/serializer_utils.cc b/paddle/fluid/jit/serializer_utils.cc
index e68d75f58d56d..41bfa71b4ce25 100644
--- a/paddle/fluid/jit/serializer_utils.cc
+++ b/paddle/fluid/jit/serializer_utils.cc
@@ -17,6 +17,7 @@
 #include <dirent.h>
 #include <fstream>
 
+#include "paddle/fluid/framework/phi_utils.h"
 #include "paddle/fluid/framework/var_desc.h"
 
 namespace paddle {
@@ -100,6 +101,10 @@ const std::vector<std::pair<std::string, std::string>> PdmodelFilePaths(
   return pdmodel_paths;
 }
 
+void InitKernelSignatureMap() {
+  paddle::framework::InitDefaultKernelSignatureMap();
+}
+
 }  // namespace utils
 }  // namespace jit
 }  // namespace paddle
diff --git a/paddle/fluid/jit/serializer_utils.h b/paddle/fluid/jit/serializer_utils.h
index dfa980544bc31..97850504d9661 100644
--- a/paddle/fluid/jit/serializer_utils.h
+++ b/paddle/fluid/jit/serializer_utils.h
@@ -17,9 +17,12 @@
 #include <string>
 #include <vector>
 
-#include "paddle/fluid/framework/var_desc.h"
-
 namespace paddle {
+
+namespace framework {
+class VarDesc;
+}  // namespace framework
+
 namespace jit {
 static const char PDMODEL_SUFFIX[] = ".pdmodel";
 static const char PDPARAMS_SUFFIX[] = ".pdiparams";
@@ -40,6 +43,8 @@ bool FileExists(const std::string& file_path);
 const std::vector<std::pair<std::string, std::string>> PdmodelFilePaths(
     const std::string& path);
 
+void InitKernelSignatureMap();
+
 }  // namespace utils
 }  // namespace jit
 }  // namespace paddle
diff --git a/python/setup.py.in b/python/setup.py.in
index c02ef7f017fca..1b36b272d0d70 100755
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -621,8 +621,12 @@ headers = (
     list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/phi/kernels', recursive=True)) +  # phi kernels headers
     # capi headers
     list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/phi/capi', recursive=True)) +  # phi capi headers
-    # utila api headers
-    list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/utils', recursive=True)))  # paddle utils headers
+    # utils api headers
+    list(find_files('*.h', '@PADDLE_SOURCE_DIR@/paddle/utils', recursive=True)))  # paddle utils headers
+
+jit_layer_headers = ['layer.h', 'serializer.h', 'serializer_utils.h', 'all.h', 'base_function.h']
+for f in jit_layer_headers:
+    headers += list(find_files(f, '@PADDLE_SOURCE_DIR@/paddle/fluid/jit', recursive=False))
 
 if '${WITH_MKLDNN}' == 'ON':
     headers += list(find_files('*', '${MKLDNN_INSTALL_DIR}/include'))  # mkldnn
@@ -667,6 +671,10 @@ class InstallHeaders(Command):
         elif 'third_party' not in header:
             # paddle headers
             install_dir = re.sub('@PADDLE_SOURCE_DIR@/', '', header)
+            print('install_dir: ', install_dir)
+            if 'fluid/jit' in install_dir:
+                install_dir = re.sub('fluid/jit', 'jit', install_dir)
+                print('fluid/jit install_dir: ', install_dir)
         else:
             # third_party
             install_dir = re.sub('${THIRD_PARTY_PATH}', 'third_party', header)
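
The sketch below is illustrative and is not part of the patch. It shows what a downstream C++ consumer of the newly exported headers could look like once they are installed under paddle/jit by the setup.py.in change above. The paddle::jit::Load entry point and the model path are assumptions for illustration; only Layer::forward, Deserializer, and utils::InitKernelSignatureMap are actually visible in this diff.

// Hypothetical consumer of the exported JitLayer headers (sketch only).
// jit::Load and the model path are assumptions, not taken from this patch.
#include <iostream>
#include <vector>

#include "paddle/fluid/jit/all.h"  // pulls in layer.h, serializer.h, serializer_utils.h

int main() {
  // Added by this patch in serializer_utils.h: registers default kernel signatures.
  paddle::jit::utils::InitKernelSignatureMap();

  phi::CPUPlace place;
  // Assumed loader declared in serializer.h; the diff only shows Deserializer::operator().
  auto layer = paddle::jit::Load("/path/to/saved/model_prefix", place);

  std::vector<paddle::experimental::Tensor> inputs;  // fill with real input tensors
  auto outputs = layer.forward(inputs);
  std::cout << "num outputs: " << outputs.size() << std::endl;
  return 0;
}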