Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[jit] jit.save support property serialization #44581

Merged
merged 6 commits into from
Jul 27, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 0 additions & 9 deletions paddle/fluid/pybind/protobuf.cc
Original file line number Diff line number Diff line change
Expand Up @@ -433,15 +433,6 @@ void BindJitProperty(pybind11::module *m) {
"set list of string",
py::arg("name"),
py::arg("val"))
.def("set_tensor",
[](const pd::VarDesc &tensor, const std::string name) {
throw platform::errors::Unimplemented("Not implement set_tensor.");
})
.def(
"set_tensors",
[](const pybind11::list &tensors, const std::string name) {
throw platform::errors::Unimplemented("Not implement set_tensors.");
})
.def("serialize_to_string", SerializeMessage<jit::Property>)
.def("parse_from_string", DeserializeMessage<jit::Property>);
}
Expand Down
1 change: 1 addition & 0 deletions python/paddle/fluid/dygraph/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
INFER_MODEL_SUFFIX = ".pdmodel"
INFER_PARAMS_SUFFIX = ".pdiparams"
INFER_PARAMS_INFO_SUFFIX = ".pdiparams.info"
INFER_PROPERTY_SUFFIX = '.meta'
zh794390558 marked this conversation as resolved.
Show resolved Hide resolved

LOADED_VAR_SUFFIX = "load"
PARAMETER_NAME_PREFIX = "param"
Expand Down
41 changes: 39 additions & 2 deletions python/paddle/fluid/dygraph/jit.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from collections import OrderedDict
import inspect
import threading
from typing import Text, Tuple, Any, List

import six
import paddle
Expand All @@ -34,7 +35,7 @@
from paddle.fluid.dygraph.dygraph_to_static.convert_call_func import ConversionOptions, CONVERSION_OPTIONS
from paddle.fluid.dygraph.dygraph_to_static.logging_utils import set_code_level, set_verbosity
from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramTranslator, StaticFunction, unwrap_decorators
from paddle.fluid.dygraph.io import TranslatedLayer, INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX, INFER_PARAMS_INFO_SUFFIX
from paddle.fluid.dygraph.io import TranslatedLayer, INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX, INFER_PARAMS_INFO_SUFFIX, INFER_PROPERTY_SUFFIX
from paddle.fluid.dygraph.layers import Layer
from paddle.fluid.executor import Executor, scope_guard
from paddle.fluid.framework import Block, ParamBase, Program, Variable, Parameter, EagerParamBase
Expand Down Expand Up @@ -644,6 +645,40 @@ def wrapper(layer, path, input_spec=None, **configs):
return wrapper


def _save_property(filename: Text, property_vals: List[Tuple[Any, Text]]):
    """Serialize class properties into a ``*.meta`` property file.

    Args:
        filename (Text): output path, conventionally ending with
            INFER_PROPERTY_SUFFIX ('.meta').
        property_vals (List[Tuple[Any, Text]]): (value, name) pairs of the
            class properties to serialize.

    Raises:
        ValueError: if a value (or a sequence element) has an unsupported
            type, or if a sequence value is empty.
    """

    def set_property(meta, key, val):
        # Dispatch the Python value to the matching typed setter of the
        # jit::Property binding. NOTE: bool is a subclass of int in Python,
        # so booleans are intentionally stored via set_int.
        if isinstance(val, float):
            meta.set_float(key, val)
        elif isinstance(val, int):
            meta.set_int(key, val)
        elif isinstance(val, str):
            meta.set_string(key, val)
        elif isinstance(val, (tuple, list)):
            if not val:
                # An empty sequence carries no element-type information;
                # fail clearly instead of raising IndexError on val[0].
                raise ValueError(
                    f"Not support empty sequence for property: {key}")
            # Element type is inferred from the first element.
            if isinstance(val[0], float):
                meta.set_floats(key, val)
            elif isinstance(val[0], int):
                meta.set_ints(key, val)
            elif isinstance(val[0], str):
                meta.set_strings(key, val)
            else:
                # Previously fell through silently and dropped the property.
                raise ValueError(
                    f"Not support sequence element type: {type(val[0])}")
        else:
            raise ValueError(f"Not support val type: {type(val)}")
        return

    with open(filename, 'wb') as f:
        meta = paddle.framework.core.Property()
        for item in property_vals:
            # property_vals stores (value, name) pairs.
            val, key = item[0], item[1]
            set_property(meta, key, val)
        f.write(meta.serialize_to_string())


@_run_save_pre_hooks
@switch_to_static_graph
def save(layer, path, input_spec=None, **configs):
Expand Down Expand Up @@ -1034,7 +1069,9 @@ def fun(inputs):
filter(paddle.fluid.io.is_persistable,
all_vars)),
filename=params_filename)
# TODO: save property
# save property
property_filename = file_prefix + INFER_PROPERTY_SUFFIX
_save_property(property_filename, property_vals)

# NOTE(chenweihang): [ Save extra variable info ]
# save_inference_model will lose some important variable information, including:
Expand Down
57 changes: 51 additions & 6 deletions python/paddle/fluid/tests/unittests/test_jit_save_load.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -1156,7 +1157,7 @@ def forward(self, x):
class Net(paddle.nn.Layer):

def __init__(self):
super(Net, self).__init__()
super().__init__()
self.fc1 = paddle.nn.Linear(4, 4)
self.fc2 = paddle.nn.Linear(4, 4)
self.bias = 0.4
Expand Down Expand Up @@ -1185,13 +1186,49 @@ def infer(self, input):
def fbias(self):
return self.bias + 1

# For extra Tensor
@paddle.jit.to_static(property=True)
def down_sampling(self):
return 4

@paddle.jit.to_static(property=True)
def fstr(self):
return "save str property"

@paddle.jit.to_static(property=True)
def ints(self):
return [10, 20]

@paddle.jit.to_static(property=True)
def floats(self):
return [1.1, 2.2]

@paddle.jit.to_static(property=True)
def strs(self):
return ["hello", "world"]


class NetTensor(paddle.nn.Layer):
    """Test layer exposing a Tensor-valued property (``fflag``) used to
    exercise jit.save property serialization with tensor values."""

    def __init__(self):
        super().__init__()
        self.fc1 = paddle.nn.Linear(4, 4)
        self.fc2 = paddle.nn.Linear(4, 4)
        self.bias = 0.4
        # Tensor attribute returned by the fflag property below.
        self.flag = paddle.ones([2], dtype="int32")

    @paddle.jit.to_static(input_spec=[InputSpec([None, 4], dtype='float32')])
    def forward(self, x):
        out = self.fc1(x)
        out = paddle.nn.functional.relu(out)
        out = paddle.mean(out)
        return out

    @paddle.jit.to_static(property=True)
    def fflag(self):
        # NOTE: an unreachable `return True` after this statement was
        # removed; the property returns the Tensor attribute only.
        return self.flag


class TestJitSaveCombine(unittest.TestCase):
class TestJitSaveCombineProperty(unittest.TestCase):

def setUp(self):
# enable dygraph mode
Expand All @@ -1201,16 +1238,24 @@ def setUp(self):
def tearDown(self):
self.temp_dir.cleanup()

def test_save_load_finetune_load(self):
def test_jit_save_combine_property(self):
model_path = os.path.join(self.temp_dir.name,
"test_jit_save_combine/model")

# Use new namespace
with unique_name.guard():
net = Net()
#save
paddle.jit.save(net, model_path, combine_params=True)

def test_jit_save_tensor_property(self):
model_path = os.path.join(self.temp_dir.name,
"test_jit_save_combine/model")
# Use new namespace
with unique_name.guard():
net = NetTensor()

paddle.jit.save(net, model_path, combine_params=True)


class LayerLoadFinetune(paddle.nn.Layer):

Expand Down