[Cherry-pick] Several bugs fix #44991

Merged
4 changes: 3 additions & 1 deletion paddle/fluid/platform/device_context.h
@@ -58,7 +58,7 @@ limitations under the License. */
#endif

#ifdef PADDLE_WITH_MKLDNN
- #include "dnnl.hpp"
+ #include "dnnl.hpp" // NOLINT
#include "paddle/fluid/framework/data_layout.h"
#endif

@@ -902,6 +902,8 @@ class DeviceContextPool {
return *pool;
}

+ static bool IsInitialized() { return pool != nullptr; }
+
static void SetPool(DeviceContextPool* dev_pool) { pool = dev_pool; }

/*! \brief Return handle of single device context. */
3 changes: 1 addition & 2 deletions paddle/fluid/pybind/eager_functions.cc
@@ -305,8 +305,7 @@ static std::vector<paddle::any> CastAttrsToTragetType(
attrs_names.size(), src.size()));
for (size_t i = 0; i < src.size(); i++) {
size_t end = attrs_names[i].find(": ");
- std::string type_name =
- attrs_names[i].substr(end + 2, attrs_names.size() - end - 2);
+ std::string type_name = attrs_names[i].substr(end + 2);
if (type_name == "int") {
if (src[i].type() == typeid(bool)) {
res.emplace_back(static_cast<int>(paddle::any_cast<bool>(src[i])));
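
Note on the change above: the old two-argument substr computed its length from attrs_names.size() — the number of attribute strings in the vector — rather than from the string attrs_names[i] itself, so the extracted type name could be silently truncated. A minimal standalone C++ sketch (with made-up attribute strings, not Paddle's real attribute list) showing the difference:

#include <cassert>
#include <string>
#include <vector>

int main() {
  // Hypothetical "name: type" descriptors, in the format parsed above.
  std::vector<std::string> attrs_names = {
      "x: float", "y: float", "axis: int", "bias: float", "act: std::string"};

  size_t end = attrs_names[0].find(": ");  // position of ": " in "x: float"

  // Old behaviour: length = attrs_names.size() - end - 2 = 5 - 1 - 2 = 2,
  // so only the first two characters of the type name survive.
  std::string truncated =
      attrs_names[0].substr(end + 2, attrs_names.size() - end - 2);
  assert(truncated == "fl");

  // Fixed behaviour: single-argument substr takes everything to the end.
  std::string type_name = attrs_names[0].substr(end + 2);
  assert(type_name == "float");
  return 0;
}

With the single-argument form, everything after ": " is kept regardless of how many attributes happen to be in the vector.
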
314 changes: 220 additions & 94 deletions paddle/phi/api/lib/CMakeLists.txt

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions paddle/phi/api/lib/context_pool.cc
@@ -17,6 +17,8 @@ limitations under the License. */
#include "paddle/phi/backends/all_context.h"
#include "paddle/phi/core/enforce.h"

+ #include "paddle/fluid/platform/init.h"
+
namespace paddle {
namespace experimental {

@@ -28,6 +30,9 @@ DeviceContextPool& DeviceContextPool::Instance() {
const phi::DeviceContext* DeviceContextPool::Get(const Place& place) {
auto it = context_map_.find(place);
if (it == context_map_.end()) {
+ if (!paddle::platform::DeviceContextPool::IsInitialized()) {
+ paddle::framework::InitDevices();
+ }
// only when we need the specific DeviceContext, get and cache it
auto* dev_ctx = paddle::platform::DeviceContextPool::Instance().Get(place);
{
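
Note on the change above: the guard makes phi's DeviceContextPool self-bootstrapping — if the fluid-level pool has not been created yet (checked via the new IsInitialized() accessor added in device_context.h), InitDevices() runs once before the lookup instead of assuming earlier framework initialization. A generic, self-contained sketch of this check-then-initialize pattern (GlobalPool and its members are illustrative names, not Paddle's API):

#include <map>
#include <string>

// Illustrative stand-in for a process-wide, lazily created context pool.
class GlobalPool {
 public:
  static bool IsInitialized() { return pool_ != nullptr; }
  static void Init() {
    if (pool_ == nullptr) pool_ = new GlobalPool();  // stands in for InitDevices()
  }
  static GlobalPool& Instance() { return *pool_; }
  int Get(const std::string& place) { return contexts_[place]; }

 private:
  static GlobalPool* pool_;
  std::map<std::string, int> contexts_;
};
GlobalPool* GlobalPool::pool_ = nullptr;

// Mirrors the new code path: initialize only when nothing has set up the
// pool yet, then fall through to the normal cached lookup.
int GetContext(const std::string& place) {
  if (!GlobalPool::IsInitialized()) {
    GlobalPool::Init();
  }
  return GlobalPool::Instance().Get(place);
}

int main() { return GetContext("cpu"); }

The point of the check is that initialization happens only on the first lookup that actually needs it; callers that already set the pool up pay nothing extra.
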
10 changes: 7 additions & 3 deletions paddle/phi/api/lib/tensor.cc
@@ -20,7 +20,9 @@ limitations under the License. */

#include "glog/logging.h"

+ #include "paddle/phi/api/include/context_pool.h"
#include "paddle/phi/api/lib/utils/allocator.h"
+ #include "paddle/phi/backends/gpu/gpu_context.h"
#include "paddle/phi/backends/gpu/gpu_info.h"
#include "paddle/phi/core/ddim.h"
#include "paddle/phi/core/dense_tensor.h"
@@ -32,8 +34,7 @@ limitations under the License. */
#include "paddle/phi/core/tensor_base.h"
#include "paddle/phi/core/tensor_meta.h"
#include "paddle/phi/core/tensor_utils.h"
-
- #include "paddle/fluid/platform/stream/cuda_stream.h"
+ // clang-format off

namespace paddle {
namespace experimental {
@@ -305,7 +306,10 @@ void Tensor::set_impl(std::shared_ptr<phi::TensorBase> &&impl) {

#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
gpuStream_t Tensor::stream() const {
- return platform::stream::get_current_stream(-1)->raw_stream();
+ int device_id = phi::backends::gpu::GetCurrentDeviceId();
+ auto* gpu_context = DeviceContextPool::Instance()
+ .Get<AllocationType::GPU>(GPUPlace(device_id));
+ return gpu_context->stream();
}
#endif

83 changes: 44 additions & 39 deletions python/paddle/fluid/dygraph/jit.py
@@ -64,8 +64,8 @@ def _extract_vars(inputs, result_list, err_tag='inputs'):
_extract_vars(var, result_list, err_tag)
else:
raise TypeError(
- "The type of 'each element of {}' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received {}.".
- format(err_tag, type(inputs)))
+ "The type of 'each element of {}' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received {}."
+ .format(err_tag, type(inputs)))


def extract_vars(inputs, err_tag='inputs'):
@@ -211,29 +211,28 @@ def decorated(python_func):
_, python_func = unwrap_decorators(python_func)

# Step 2. copy some attributes from original python function.
- static_layer = copy_decorator_attrs(
- original_func=python_func,
- decorated_obj=StaticFunction(
- function=python_func,
- input_spec=input_spec,
- build_strategy=build_strategy))
+ static_layer = copy_decorator_attrs(original_func=python_func,
+ decorated_obj=StaticFunction(
+ function=python_func,
+ input_spec=input_spec,
+ build_strategy=build_strategy))

return static_layer

build_strategy = build_strategy or BuildStrategy()
if not isinstance(build_strategy, BuildStrategy):
raise TypeError(
- "Required type(build_strategy) shall be `paddle.static.BuildStrategy`, but received {}".
- format(type(build_strategy).__name__))
+ "Required type(build_strategy) shall be `paddle.static.BuildStrategy`, but received {}"
+ .format(type(build_strategy).__name__))

# for usage: `declarative(foo, ...)`
if function is not None:
if isinstance(function, Layer):
if isinstance(function.forward, StaticFunction):
class_name = function.__class__.__name__
logging_utils.warn(
- "`{}.forward` has already been decorated somewhere. It will be redecorated to replace previous one.".
- format(class_name))
+ "`{}.forward` has already been decorated somewhere. It will be redecorated to replace previous one."
+ .format(class_name))
function.forward = decorated(function.forward)
return function
else:
@@ -284,6 +283,7 @@ def func(x):


class _SaveLoadConfig(object):
+
def __init__(self):
self._output_spec = None
self._model_filename = None
@@ -371,7 +371,7 @@ def keep_name_table(self, value):


def _parse_save_configs(configs):
- supported_configs = ['output_spec', "with_hook"]
+ supported_configs = ['output_spec', "with_hook", "clip_extra"]

# input check
for key in configs:
@@ -384,6 +384,7 @@ def _parse_save_configs(configs):
inner_config = _SaveLoadConfig()
inner_config.output_spec = configs.get('output_spec', None)
inner_config.with_hook = configs.get('with_hook', False)
+ inner_config.clip_extra = configs.get("clip_extra", False)

return inner_config

@@ -622,6 +623,7 @@ def _remove_save_pre_hook(hook):


def _run_save_pre_hooks(func):
+
def wrapper(layer, path, input_spec=None, **configs):
global _save_pre_hooks
for hook in _save_pre_hooks:
@@ -775,8 +777,8 @@ def fun(inputs):
"The paddle.jit.save doesn't work when setting ProgramTranslator.enable to False."
)

- if not (isinstance(layer, Layer) or inspect.isfunction(layer) or isinstance(
- layer, StaticFunction)):
+ if not (isinstance(layer, Layer) or inspect.isfunction(layer)
+ or isinstance(layer, StaticFunction)):
raise TypeError(
"The input of paddle.jit.save should be 'Layer' or 'Function', but received input type is %s."
% type(layer))
@@ -837,7 +839,7 @@ def fun(inputs):
# parse configs
configs = _parse_save_configs(configs)
# whether outermost layer has pre/post hook, if does, we need also save
- # these operators in program.
+ # these operators in program.
with_hook = configs.with_hook

scope = core.Scope()
@@ -848,7 +850,9 @@ def fun(inputs):
with_hook = True
else:
# layer is function
- functions = [layer, ]
+ functions = [
+ layer,
+ ]
for attr_func in functions:
if isinstance(layer, Layer):
static_func = getattr(inner_layer, attr_func, None)
@@ -862,8 +866,8 @@ def fun(inputs):
if inner_input_spec:
inner_input_spec = pack_sequence_as(input_spec,
inner_input_spec)
- static_forward = declarative(
- inner_layer.forward, input_spec=inner_input_spec)
+ static_forward = declarative(inner_layer.forward,
+ input_spec=inner_input_spec)
concrete_program = static_forward.concrete_program_specify_input_spec(
with_hook=with_hook)
# the input_spec has been used in declarative, which is equal to
@@ -882,14 +886,14 @@ def fun(inputs):
if inner_input_spec:
inner_input_spec = pack_sequence_as(input_spec,
inner_input_spec)
- static_function = declarative(
- attr_func, input_spec=inner_input_spec)
+ static_function = declarative(attr_func,
+ input_spec=inner_input_spec)
concrete_program = static_function.concrete_program

if static_function._class_instance is None:
warnings.warn(
- '`jit.save` will only save the `Program`, not the parameters. If you have to save the parameters, please make sure that {} is a member function of `paddle.nn.Layer` and the saved parameters are in `state_dict`'.
- format(layer))
+ '`jit.save` will only save the `Program`, not the parameters. If you have to save the parameters, please make sure that {} is a member function of `paddle.nn.Layer` and the saved parameters are in `state_dict`'
+ .format(layer))

dygraph_state_dict = None
if isinstance(inner_layer, Layer):
@@ -922,8 +926,8 @@ def fun(inputs):
param_or_buffer_tensor = scope.var(
param_or_buffer.name).get_tensor()
#src_tensor = param_or_buffer.value().get_tensor()
- src_tensor = state_var_dict[param_or_buffer.name].value(
- ).get_tensor()
+ src_tensor = state_var_dict[
+ param_or_buffer.name].value().get_tensor()
param_or_buffer_tensor._share_data_with(src_tensor)
# record var info
if param_or_buffer.name not in extra_var_info:
@@ -986,7 +990,7 @@ def fun(inputs):
params_filename=params_filename,
export_for_deployment=configs._export_for_deployment,
program_only=configs._program_only,
- clip_extra=False)
+ clip_extra=configs.clip_extra)

# NOTE(chenweihang): [ Save extra variable info ]
# save_inference_model will lose some important variable information, including:
@@ -1534,14 +1538,16 @@ def forward(self, input):
"fluid.dygraph.jit.TracedLayer.save_inference_model")
if isinstance(feed, list):
for f in feed:
- check_type(f, "each element of feed", int,
- "fluid.dygraph.jit.TracedLayer.save_inference_model")
+ check_type(
+ f, "each element of feed", int,
+ "fluid.dygraph.jit.TracedLayer.save_inference_model")
check_type(fetch, "fetch", (type(None), list),
"fluid.dygraph.jit.TracedLayer.save_inference_model")
if isinstance(fetch, list):
for f in fetch:
- check_type(f, "each element of fetch", int,
- "fluid.dygraph.jit.TracedLayer.save_inference_model")
+ check_type(
+ f, "each element of fetch", int,
+ "fluid.dygraph.jit.TracedLayer.save_inference_model")
clip_extra = kwargs.get('clip_extra', False)
# path check
file_prefix = os.path.basename(path)
@@ -1575,12 +1581,11 @@ def get_feed_fetch(all_vars, partial_vars):
model_filename = file_prefix + INFER_MODEL_SUFFIX
params_filename = file_prefix + INFER_PARAMS_SUFFIX

- save_inference_model(
- dirname=dirname,
- feeded_var_names=feeded_var_names,
- target_vars=target_vars,
- executor=self._exe,
- main_program=self._program.clone(),
- model_filename=model_filename,
- params_filename=params_filename,
- clip_extra=clip_extra)
+ save_inference_model(dirname=dirname,
+ feeded_var_names=feeded_var_names,
+ target_vars=target_vars,
+ executor=self._exe,
+ main_program=self._program.clone(),
+ model_filename=model_filename,
+ params_filename=params_filename,
+ clip_extra=clip_extra)
3 changes: 2 additions & 1 deletion python/setup.py.in
@@ -570,7 +570,8 @@ if '${CMAKE_BUILD_TYPE}' == 'Release':
commands = ["install_name_tool -id '@loader_path/../libs/' ${PADDLE_BINARY_DIR}/python/paddle/fluid/${FLUID_CORE_NAME}" + '.so']
commands.append("install_name_tool -add_rpath '@loader_path/../libs/' ${PADDLE_BINARY_DIR}/python/paddle/fluid/${FLUID_CORE_NAME}" + '.so')
else:
- commands = ["patchelf --set-rpath '$ORIGIN/../libs/' ${PADDLE_BINARY_DIR}/python/paddle/fluid/${FLUID_CORE_NAME}" + '.so']
+ commands = ["patchelf --set-soname '${FLUID_CORE_NAME}.so' ${PADDLE_BINARY_DIR}/python/paddle/fluid/${FLUID_CORE_NAME}" + '.so']
+ commands.append("patchelf --set-rpath '$ORIGIN/../libs/' ${PADDLE_BINARY_DIR}/python/paddle/fluid/${FLUID_CORE_NAME}" + '.so')
# The sw_64 not suppot patchelf, so we just disable that.
if platform.machine() != 'sw_64' and platform.machine() != 'mips64':
for command in commands: