From b8e3e0abef88fe4c54431466de1724afd70817d8 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Fri, 24 Jun 2022 09:14:00 +0000 Subject: [PATCH 1/9] merge and add base support for non-local for --- .../dygraph_to_static/convert_operators.py | 19 +++--- .../dygraph_to_static/ifelse_transformer.py | 45 +------------- .../dygraph_to_static/loop_transformer.py | 51 +++++++++------- .../dygraph_to_static/variable_trans_func.py | 58 ++++++++++++++++++- 4 files changed, 98 insertions(+), 75 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index 0346e4f1efda8..ac1619f8bf3e7 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -24,7 +24,7 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar -def convert_while_loop(cond, body, loop_vars): +def convert_while_loop(cond, body, getter, setter): """ A function representation of a Python ``while`` statement. @@ -39,25 +39,26 @@ def convert_while_loop(cond, body, loop_vars): # NOTE: It may be slower if cond is very expensive, but usually cond is just O(1). # If loop_vars is changed during cond callable, then it causes bug, but current logical_and/logical_not/... doesn't change the loop_vars. - pred = cond(*loop_vars) + pred = cond() if isinstance(pred, Variable): - loop_vars = _run_paddle_while_loop(cond, body, loop_vars) + loop_vars = _run_paddle_while_loop(cond, body, getter, setter) else: - loop_vars = _run_py_while(cond, body, loop_vars) + loop_vars = _run_py_while(cond, body, getter, setter) return loop_vars -def _run_paddle_while_loop(cond, body, loop_vars): +def _run_paddle_while_loop(cond, body, getter, setter): # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors. 
- loop_vars = [to_static_variable(var) for var in loop_vars] + loop_vars = [to_static_variable(var) for var in getter()] + setter(loop_vars) loop_vars = control_flow.while_loop(cond, body, loop_vars) return loop_vars -def _run_py_while(cond, body, loop_vars): - while cond(*loop_vars): - loop_vars = body(*loop_vars) +def _run_py_while(cond, body, getter, setter): + while cond(): + loop_vars = body() return loop_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py index bff41c9b9ae02..64ce18c65935d 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py @@ -32,6 +32,7 @@ from paddle.fluid.dygraph.dygraph_to_static.static_analysis import AstNodeWrapper from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_undefined_var from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_nonlocal_stmt_node +from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_get_args_node, create_set_args_node TRUE_FUNC_PREFIX = 'true_fn' FALSE_FUNC_PREFIX = 'false_fn' @@ -511,11 +512,11 @@ def transform_if_else(node, root): if any([not isinstance(ctx, gast.Load) for ctx in ctxs]): parent_ids_set.add(k) - trun_args = parse_cond_args(parent_ids_set, body_name_ids, + true_args = parse_cond_args(parent_ids_set, body_name_ids, modified_name_ids_from_parent) false_args = parse_cond_args(parent_ids_set, orelse_name_ids, modified_name_ids_from_parent) - nonlocal_names = list(trun_args | false_args | new_vars_to_create) + nonlocal_names = list(true_args | false_args | new_vars_to_create) nonlocal_names.sort() # NOTE: All var in return_name_ids should be in nonlocal_names. 
nonlocal_names = _valid_nonlocal_names(return_name_ids, nonlocal_names) @@ -551,46 +552,6 @@ def transform_if_else(node, root): return create_new_vars_in_parent_stmts, true_func_node, false_func_node, get_args_node, set_args_node, return_name_ids -def create_get_args_node(names): - """ - Create get_args function as follows: - - def get_args_0(): - nonlocal x, y - """ - assert isinstance(names, (list, tuple)) - template = """ - def {func_name}(): - nonlocal {vars} - return {vars} - """ - func_def = template.format( - func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX), - vars=",".join(names)) - return gast.parse(textwrap.dedent(func_def)).body[0] - - -def create_set_args_node(names): - """ - Create set_args function as follows: - - def set_args_0(__args): - nonlocal x, y - x, y = __args - """ - assert isinstance(names, (list, tuple)) - template = """ - def {func_name}({args}): - nonlocal {vars} - {vars} = {args} - """ - func_def = template.format( - func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), - args=ARGS_NAME, - vars=",".join(names)) - return gast.parse(textwrap.dedent(func_def)).body[0] - - def create_convert_ifelse_node(return_name_ids, pred, true_func, diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 045878ed54e1d..8c6bc84e23a17 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -29,6 +29,8 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import ForNodeVisitor from paddle.fluid.dygraph.dygraph_to_static.utils import RenameTransformer from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_static_variable_gast_node +from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_undefined_var +from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_nonlocal_stmt_node, create_get_args_node, create_set_args_node __all__ = ['LoopTransformer', 'NameVisitor'] @@ -41,8 +43,11 @@ ATTRIBUTE_VARIABLE_PREFIX = '__attribute_variable' +ARGS_NAME = '__args' -def create_while_nodes(condition_name, body_name, loop_var_names): + +def create_while_nodes(condition_name, body_name, loop_var_names, getter_name, + setter_name): """ Returns a list of gast.Node which represents the calling of Paddle controlflow while_loop. @@ -74,7 +79,6 @@ def create_while_nodes(condition_name, body_name, loop_var_names): # # For example: loop_var_names = [a, b, foo.x], the type of `a` or `b` is gast.Name, # but the type of `foo.x` gast.Attribute. - unique_name_to_origin = {} # We have to make loop_var_names and assign_loop_var_names with same order # set doesn't have order so we convert it to list @@ -90,9 +94,9 @@ def create_while_nodes(condition_name, body_name, loop_var_names): assign_loop_var_names.append(name) while_func_name = "_jst.convert_while_loop" - while_node_str = "[{}] = {}({}, {}, [{}])".format( - ",".join(assign_loop_var_names), while_func_name, condition_name, - body_name, ",".join(loop_var_names)) + while_node_str = "{}({}, {}, {}, {})".format(while_func_name, + condition_name, body_name, + getter_name, setter_name) while_node = gast.parse(while_node_str).body[0] ret = [while_node] @@ -575,7 +579,16 @@ def get_for_stmt_nodes(self, node): # We need to create static variable for those variables for name in create_var_names: if "." 
not in name: - new_stmts.append(create_static_variable_gast_node(name)) + new_stmts.append(create_undefined_var(name)) + + # create non-local statement for body and cond. + nonlocal_names = list(loop_var_names | create_var_names) + nonlocal_names.sort() + # TODO(dev): Need a better way to deal this. + if ARGS_NAME in nonlocal_names: + nonlocal_names.remove(ARGS_NAME) + + nonlocal_stmt_node = [create_nonlocal_stmt_node(nonlocal_names)] # 4. append init statements new_stmts.extend(init_stmts) @@ -583,22 +596,18 @@ def get_for_stmt_nodes(self, node): # 5. create & append condition function node condition_func_node = gast.FunctionDef( name=unique_name.generate(FOR_CONDITION_PREFIX), - args=gast.arguments(args=[ - gast.Name(id=name, - ctx=gast.Param(), - annotation=None, - type_comment=None) for name in loop_var_names - ], + args=gast.arguments(args=[], posonlyargs=[], vararg=None, kwonlyargs=[], kw_defaults=None, kwarg=None, defaults=[]), - body=[gast.Return(value=cond_stmt)], + body=nonlocal_stmt_node + [gast.Return(value=cond_stmt)], decorator_list=[], returns=None, type_comment=None) + # TODO(??? xiongkun: why we need rename ?) for name in loop_var_names: if "." in name: rename_transformer = RenameTransformer(condition_func_node) @@ -613,19 +622,14 @@ def get_for_stmt_nodes(self, node): loop_var_names, ctx=gast.Load(), gen_tuple_if_single=True))) body_func_node = gast.FunctionDef( name=unique_name.generate(FOR_BODY_PREFIX), - args=gast.arguments(args=[ - gast.Name(id=name, - ctx=gast.Param(), - annotation=None, - type_comment=None) for name in loop_var_names - ], + args=gast.arguments(args=[], posonlyargs=[], vararg=None, kwonlyargs=[], kw_defaults=None, kwarg=None, defaults=[]), - body=body_stmts, + body=nonlocal_stmt_node + body_stmts, decorator_list=[], returns=None, type_comment=None) @@ -636,10 +640,15 @@ def get_for_stmt_nodes(self, node): name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(body_func_node) + get_args_node = create_get_args_node(nonlocal_names) + set_args_node = create_set_args_node(nonlocal_names) # 7. 
create & append while loop node while_loop_nodes = create_while_nodes(condition_func_node.name, body_func_node.name, - loop_var_names) + loop_var_names, + get_args_node.name, + set_args_node.name) + new_stmts.extend([get_args_node, set_args_node]) new_stmts.extend(while_loop_nodes) return new_stmts diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index e823813acaacb..f6e0a39b81f02 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -16,6 +16,7 @@ import six import paddle +import textwrap from paddle.utils import gast from paddle.fluid import core from paddle.fluid import unique_name @@ -23,9 +24,15 @@ from paddle.fluid.layer_helper import LayerHelper __all__ = [ - 'create_bool_as_type', 'create_fill_constant_node', - 'create_static_variable_gast_node', 'data_layer_not_check', - 'to_static_variable', 'to_static_variable_gast_node', 'create_undefined_var' + 'create_bool_as_type', + 'create_fill_constant_node', + 'create_static_variable_gast_node', + 'data_layer_not_check', + 'to_static_variable', + 'to_static_variable_gast_node', + 'create_undefined_var', + 'create_get_args_node', + 'create_set_args_node', ] @@ -136,3 +143,48 @@ def create_bool_as_type(x, value=True): return paddle.full(shape=[1], fill_value=value, dtype="bool") else: return value + + +def create_get_args_node(names): + """ + Create get_args function as follows: + + def get_args_0(): + nonlocal x, y + """ + assert isinstance(names, (list, tuple)) + template = """ + def {func_name}(): + nonlocal {vars} + return {vars} + """ + func_def = template.format( + func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX), + vars=",".join(names)) + return gast.parse(textwrap.dedent(func_def)).body[0] + + +GET_ARGS_FUNC_PREFIX = 'get_args' +SET_ARGS_FUNC_PREFIX = 'set_args' +ARGS_NAME = '__args' + + +def create_set_args_node(names): + """ + Create set_args function as follows: + + def set_args_0(__args): + nonlocal x, y + x, y = __args + """ + assert isinstance(names, (list, tuple)) + template = """ + def {func_name}({args}): + nonlocal {vars} + {vars} = {args} + """ + func_def = template.format( + func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), + args=ARGS_NAME, + vars=",".join(names)) + return gast.parse(textwrap.dedent(func_def)).body[0] From b92c95030bf042705624104b19bb350a868a7c25 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Mon, 27 Jun 2022 08:27:15 +0000 Subject: [PATCH 2/9] for and while non-local support --- .../dygraph_to_static/convert_operators.py | 4 +- .../dygraph_to_static/loop_transformer.py | 84 +++++++++---------- .../dygraph_to_static/variable_trans_func.py | 33 +++++++- .../unittests/dygraph_to_static/test_loop.py | 2 +- 4 files changed, 75 insertions(+), 48 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index ac1619f8bf3e7..098fca668abb2 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -51,8 +51,10 @@ def convert_while_loop(cond, body, getter, setter): def _run_paddle_while_loop(cond, body, getter, setter): # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors. 
loop_vars = [to_static_variable(var) for var in getter()] - setter(loop_vars) + setter(loop_vars) # change the non-local var to variable + # variable maybe modified to inner var. change it into loop_vars = control_flow.while_loop(cond, body, loop_vars) + setter(loop_vars) # change the inner-scope var return loop_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index d215576f2735e..1675333eedd81 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -41,8 +41,6 @@ FOR_BODY_PREFIX = 'for_loop_body' GENERATE_VARIABLE_PREFIX = 'generate_variable' -ATTRIBUTE_VARIABLE_PREFIX = '__attribute_variable' - ARGS_NAME = '__args' @@ -79,19 +77,12 @@ def create_while_nodes(condition_name, body_name, loop_var_names, getter_name, # # For example: loop_var_names = [a, b, foo.x], the type of `a` or `b` is gast.Name, # but the type of `foo.x` gast.Attribute. - unique_name_to_origin = {} # We have to make loop_var_names and assign_loop_var_names with same order # set doesn't have order so we convert it to list loop_var_names = list(loop_var_names) assign_loop_var_names = [] for name in (loop_var_names): - if "." in name: - # name is an attribute variable such as foo.x - tmp_attr_name = unique_name.generate(ATTRIBUTE_VARIABLE_PREFIX) - unique_name_to_origin[tmp_attr_name] = name - assign_loop_var_names.append(tmp_attr_name) - else: - assign_loop_var_names.append(name) + assign_loop_var_names.append(name) while_func_name = "_jst.convert_while_loop" while_node_str = "{}({}, {}, {}, {})".format(while_func_name, @@ -100,15 +91,6 @@ def create_while_nodes(condition_name, body_name, loop_var_names, getter_name, while_node = gast.parse(while_node_str).body[0] ret = [while_node] - for tmp_attr_name in unique_name_to_origin: - origin_attr_var = unique_name_to_origin[tmp_attr_name] - dot_pos = origin_attr_var.rindex(".") - obj_name = origin_attr_var[0:dot_pos] - attr_name = origin_attr_var[dot_pos + 1:] - assign_if_not_prop_str = "if not isinstance(getattr(type({}), '{}', None), property): {} = {}".format( - obj_name, attr_name, origin_attr_var, tmp_attr_name) - assign_if_not_prop_node = gast.parse(assign_if_not_prop_str).body[0] - ret.append(assign_if_not_prop_node) return ret @@ -286,9 +268,7 @@ def get_loop_var_names(self, node): # If this var is a basic variable and read-only and not # condition var, it may not be loop_var else it should # be in loop_var as input - if (not name in condition_names) and ( - not name in write_names - ) and self._node_var_type_is_basic(name_to_type[name]): + if (not name in condition_names) and (not name in write_names): continue loop_var_names.add(name) @@ -695,7 +675,10 @@ def get_for_stmt_nodes(self, node): name=unique_name.generate(FOR_CONDITION_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=None, + vararg=gast.Name(id="args", + ctx=gast.Param(), + annotation=None, + type_comment=None), kwonlyargs=[], kw_defaults=None, kwarg=None, @@ -716,12 +699,15 @@ def get_for_stmt_nodes(self, node): # append return values for loop body body_stmts.append( gast.Return(value=generate_name_node( - loop_var_names, ctx=gast.Load(), gen_tuple_if_single=True))) + nonlocal_names, ctx=gast.Load(), gen_tuple_if_single=True))) body_func_node = gast.FunctionDef( name=unique_name.generate(FOR_BODY_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=None, + vararg=gast.Name(id="args", 
+ ctx=gast.Param(), + annotation=None, + type_comment=None), kwonlyargs=[], kw_defaults=None, kwarg=None, @@ -742,7 +728,7 @@ def get_for_stmt_nodes(self, node): # 7. create & append while loop node while_loop_nodes = create_while_nodes(condition_func_node.name, body_func_node.name, - loop_var_names, + nonlocal_names, get_args_node.name, set_args_node.name) new_stmts.extend([get_args_node, set_args_node]) @@ -755,6 +741,15 @@ def get_while_stmt_nodes(self, node): node) new_stmts = [] + # create non-local statement for body and cond. + nonlocal_names = list(loop_var_names | create_var_names) + nonlocal_names.sort() + # TODO(dev): Need a better way to deal this. + if ARGS_NAME in nonlocal_names: + nonlocal_names.remove(ARGS_NAME) + + nonlocal_stmt_node = [create_nonlocal_stmt_node(nonlocal_names)] + # Python can create variable in loop and use it out of loop, E.g. # # while x < 10: @@ -769,19 +764,17 @@ def get_while_stmt_nodes(self, node): condition_func_node = gast.FunctionDef( name=unique_name.generate(WHILE_CONDITION_PREFIX), - args=gast.arguments(args=[ - gast.Name(id=name, - ctx=gast.Param(), - annotation=None, - type_comment=None) for name in loop_var_names - ], + args=gast.arguments(args=[], posonlyargs=[], - vararg=None, + vararg=gast.Name(id="args", + ctx=gast.Param(), + annotation=None, + type_comment=None), kwonlyargs=[], kw_defaults=None, kwarg=None, defaults=[]), - body=[gast.Return(value=node.test)], + body=nonlocal_stmt_node + [gast.Return(value=node.test)], decorator_list=[], returns=None, type_comment=None) @@ -796,22 +789,20 @@ def get_while_stmt_nodes(self, node): new_body = node.body new_body.append( gast.Return(value=generate_name_node( - loop_var_names, ctx=gast.Load(), gen_tuple_if_single=True))) + nonlocal_names, ctx=gast.Load(), gen_tuple_if_single=True))) body_func_node = gast.FunctionDef( name=unique_name.generate(WHILE_BODY_PREFIX), - args=gast.arguments(args=[ - gast.Name(id=name, - ctx=gast.Param(), - annotation=None, - type_comment=None) for name in loop_var_names - ], + args=gast.arguments(args=[], posonlyargs=[], - vararg=None, + vararg=gast.Name(id="args", + ctx=gast.Param(), + annotation=None, + type_comment=None), kwonlyargs=[], kw_defaults=None, kwarg=None, defaults=[]), - body=new_body, + body=nonlocal_stmt_node + new_body, decorator_list=[], returns=None, type_comment=None) @@ -821,9 +812,14 @@ def get_while_stmt_nodes(self, node): rename_transformer.rename( name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(body_func_node) + get_args_node = create_get_args_node(nonlocal_names) + set_args_node = create_set_args_node(nonlocal_names) while_loop_nodes = create_while_nodes(condition_func_node.name, body_func_node.name, - loop_var_names) + nonlocal_names, + get_args_node.name, + set_args_node.name) + new_stmts.extend([get_args_node, set_args_node]) new_stmts.extend(while_loop_nodes) return new_stmts diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index f6e0a39b81f02..504bb28072cca 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -88,6 +88,15 @@ def create_undefined_var(name): def create_nonlocal_stmt_node(names): assert isinstance(names, (list, tuple)) + + def remove_attribute(x): + if '.' 
in x: return x.split('.')[0] + else: return x + + mapped = list(map(remove_attribute, names)) + names = sorted( + mapped, + key=mapped.index) # to keep the order, we can't use set() to unique func_code = "nonlocal {}".format(','.join(names)) return gast.parse(func_code).body[0] @@ -153,13 +162,23 @@ def get_args_0(): nonlocal x, y """ assert isinstance(names, (list, tuple)) + + def remove_attribute(x): + if '.' in x: return x.split('.')[0] + else: return x + + mapped = list(map(remove_attribute, names)) + nonlocal_names = sorted( + mapped, + key=mapped.index) # to keep the order, we can't use set() to unique template = """ def {func_name}(): - nonlocal {vars} + nonlocal {nonlocal_vars} return {vars} """ func_def = template.format( func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX), + nonlocal_vars=','.join(nonlocal_names), vars=",".join(names)) return gast.parse(textwrap.dedent(func_def)).body[0] @@ -178,13 +197,23 @@ def set_args_0(__args): x, y = __args """ assert isinstance(names, (list, tuple)) + + def remove_attribute(x): + if '.' in x: return x.split('.')[0] + else: return x + + mapped = list(map(remove_attribute, names)) + nonlocal_names = sorted( + mapped, + key=mapped.index) # to keep the order, we can't use set() to unique template = """ def {func_name}({args}): - nonlocal {vars} + nonlocal {nonlocal_vars} {vars} = {args} """ func_def = template.format( func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), args=ARGS_NAME, + nonlocal_vars=','.join(nonlocal_names), vars=",".join(names)) return gast.parse(textwrap.dedent(func_def)).body[0] diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_loop.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_loop.py index 78d97a3884aed..683135b9078dc 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_loop.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_loop.py @@ -270,7 +270,7 @@ def test_nested_loop_vars(self): self.loop_var_names = [ set(["j", "two"]), set(["i", "three", "b"]), - set(["i", "j"]) + set(["i"]) ] self.create_var_names = [set(), set(["b"]), set()] From afbd5f04b9bdc79951686595f9cc5a999cfdd3a1 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Tue, 28 Jun 2022 03:55:50 +0000 Subject: [PATCH 3/9] fix ci errors: v1 --- .../dygraph_to_static/convert_operators.py | 2 ++ .../dygraph_to_static/ifelse_transformer.py | 5 ++-- .../dygraph_to_static/loop_transformer.py | 13 +++++----- .../fluid/dygraph/dygraph_to_static/utils.py | 10 +++++++ .../dygraph_to_static/variable_trans_func.py | 26 ++++++++++--------- .../unittests/dygraph_to_static/test_list.py | 1 - 6 files changed, 36 insertions(+), 21 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index 098fca668abb2..248d013e13c9a 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -50,6 +50,7 @@ def convert_while_loop(cond, body, getter, setter): def _run_paddle_while_loop(cond, body, getter, setter): # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors. + # UndefinedVar will become data layer not check. loop_vars = [to_static_variable(var) for var in getter()] setter(loop_vars) # change the non-local var to variable # variable maybe modified to inner var. 
change it into @@ -59,6 +60,7 @@ def _run_paddle_while_loop(cond, body, getter, setter): def _run_py_while(cond, body, getter, setter): + loop_vars = getter() while cond(): loop_vars = body() return loop_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py index bb549064cee53..47d783c8b6ccc 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py @@ -420,13 +420,14 @@ def _vars_loaded(ids_dict): modified_vars = body_modified_vars | orelse_modified_vars # new vars + # TODO(remove __args when new FunctionScopeAnalysis has been used.) body_new_vars = set([ var for var in _vars_with_store(if_vars_dict) - if var not in parent_vars_dict + if var not in parent_vars_dict and var != "__args" ]) orelse_new_vars = set([ var for var in _vars_with_store(else_vars_dict) - if var not in parent_vars_dict + if var not in parent_vars_dict and var != "__args" ]) new_vars_in_body_or_orelse = body_new_vars | orelse_new_vars new_vars_in_one_of_body_or_orelse = body_new_vars ^ orelse_new_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 1675333eedd81..23f0648455456 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -103,8 +103,10 @@ def __init__(self): self.globals = set() self.nonlocals = set() self.args = set() - self.w_vars = set() # all vars been stored, + # all vars been stored, # may be globals or non-locals + self.w_vars = set() + def created_vars(self): return self.w_vars - self.globals - self.nonlocals - self.args @@ -629,7 +631,6 @@ def get_for_stmt_nodes(self, node): if stmts_tuple is None: return [node] init_stmts, cond_stmt, body_stmts = stmts_tuple - # 2. 
get original loop vars loop_var_names, create_var_names = self.name_visitor.get_loop_var_names( node) @@ -675,7 +676,7 @@ def get_for_stmt_nodes(self, node): name=unique_name.generate(FOR_CONDITION_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=gast.Name(id="args", + vararg=gast.Name(id=ARGS_NAME, ctx=gast.Param(), annotation=None, type_comment=None), @@ -704,7 +705,7 @@ def get_for_stmt_nodes(self, node): name=unique_name.generate(FOR_BODY_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=gast.Name(id="args", + vararg=gast.Name(id=ARGS_NAME, ctx=gast.Param(), annotation=None, type_comment=None), @@ -766,7 +767,7 @@ def get_while_stmt_nodes(self, node): name=unique_name.generate(WHILE_CONDITION_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=gast.Name(id="args", + vararg=gast.Name(id=ARGS_NAME, ctx=gast.Param(), annotation=None, type_comment=None), @@ -794,7 +795,7 @@ def get_while_stmt_nodes(self, node): name=unique_name.generate(WHILE_BODY_PREFIX), args=gast.arguments(args=[], posonlyargs=[], - vararg=gast.Name(id="args", + vararg=gast.Name(id=ARGS_NAME, ctx=gast.Param(), annotation=None, type_comment=None), diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index 2df8169a3efe1..e1ae58569c53c 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -842,6 +842,16 @@ def visit_Name(self, node): return self.replace_node return node + def visit_Nonlocal(self, node): + names = node.names + + def replace(s): + if s == self.target_name: return self.replace_node.id + return s + + node.names = list(map(replace, names)) + return node + class ForLoopTuplePreTransformer(gast.NodeTransformer): """ diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index 99925424bfdc8..83bae73dc48ab 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -22,6 +22,7 @@ from paddle.fluid import unique_name from paddle.fluid.framework import Variable from paddle.fluid.layer_helper import LayerHelper +from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar __all__ = [ 'create_bool_as_type', @@ -71,14 +72,14 @@ def data_layer_not_check(name, shape, dtype='float32', lod_level=0): if shape[i] is None: shape[i] = -1 - return helper.create_global_variable(name=name, - shape=shape, - dtype=dtype, - type=core.VarDesc.VarType.LOD_TENSOR, - stop_gradient=True, - lod_level=lod_level, - is_data=True, - need_check_feed=False) + return helper.create_variable(name=name, + shape=shape, + dtype=dtype, + type=core.VarDesc.VarType.LOD_TENSOR, + stop_gradient=True, + lod_level=lod_level, + is_data=True, + need_check_feed=False) def create_undefined_var(name): @@ -137,10 +138,11 @@ def to_static_variable(x): return paddle.full(shape=[1], dtype='bool', fill_value=x) if isinstance(x, float): return paddle.full(shape=[1], dtype='float64', fill_value=x) - if isinstance(x, six.integer_types): return paddle.full(shape=[1], dtype='int64', fill_value=x) - + if isinstance(x, UndefinedVar): + return data_layer_not_check(unique_name.generator("loop_undefined_var"), + [-1]) return x @@ -185,7 +187,7 @@ def remove_attribute(x): template = """ def {func_name}(): nonlocal {nonlocal_vars} - return {vars} + return {vars}, """ func_def = 
template.format( func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX), @@ -231,7 +233,7 @@ def remove_attribute(x): template = """ def {func_name}({args}): nonlocal {nonlocal_vars} - {vars} = {args} + {vars}, = {args} """ func_def = template.format( func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py index 55dff1c92bb20..bd67ff2a489c0 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py @@ -177,7 +177,6 @@ def test_list_pop_in_for_loop(x, iter_num): one = fluid.layers.ones(shape=[1], dtype="int32") for i in range(one.numpy()[0]): item = a.pop() - return a[0], item, b[1] From 119918fb537c0fe9b76865218f2d10a5b061a606 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Tue, 28 Jun 2022 08:05:53 +0000 Subject: [PATCH 4/9] fix bug --- .../dygraph_to_static/convert_operators.py | 12 +++++++--- .../dygraph_to_static/ifelse_transformer.py | 4 ++++ .../dygraph_to_static/loop_transformer.py | 22 ------------------- .../fluid/dygraph/dygraph_to_static/utils.py | 14 ++++++++---- .../dygraph_to_static/variable_trans_func.py | 22 +++++-------------- 5 files changed, 28 insertions(+), 46 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index 248d013e13c9a..4b4ec17614d75 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -50,12 +50,18 @@ def convert_while_loop(cond, body, getter, setter): def _run_paddle_while_loop(cond, body, getter, setter): # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors. + def to_list(x): + if isinstance(x, (tuple, list)): return x + return [x] + # UndefinedVar will become data layer not check. - loop_vars = [to_static_variable(var) for var in getter()] - setter(loop_vars) # change the non-local var to variable + loop_vars = [to_static_variable(var) for var in to_list(getter())] + setter(loop_vars if len(loop_vars) > 1 else + loop_vars[0]) # change the non-local var to variable # variable maybe modified to inner var. 
change it into loop_vars = control_flow.while_loop(cond, body, loop_vars) - setter(loop_vars) # change the inner-scope var + setter(loop_vars if len(loop_vars) > 1 else + loop_vars[0]) # change the non-local var to variable return loop_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py index 47d783c8b6ccc..b61332de50e25 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py @@ -416,7 +416,11 @@ def _vars_loaded(ids_dict): # modified vars body_modified_vars = _modified_vars(if_vars_dict, parent_vars_dict) + body_modified_vars = set(filter(lambda x: x != "__args", + body_modified_vars)) orelse_modified_vars = _modified_vars(else_vars_dict, parent_vars_dict) + orelse_modified_vars = set( + filter(lambda x: x != "__args", orelse_modified_vars)) modified_vars = body_modified_vars | orelse_modified_vars # new vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 23f0648455456..b145e641bc632 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -39,7 +39,6 @@ FOR_CONDITION_PREFIX = 'for_loop_condition' FOR_BODY_PREFIX = 'for_loop_body' -GENERATE_VARIABLE_PREFIX = 'generate_variable' ARGS_NAME = '__args' @@ -688,12 +687,6 @@ def get_for_stmt_nodes(self, node): decorator_list=[], returns=None, type_comment=None) - # TODO(??? xiongkun: why we need rename ?) - for name in loop_var_names: - if "." in name: - rename_transformer = RenameTransformer(condition_func_node) - rename_transformer.rename( - name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(condition_func_node) # 6. create & append loop body function node @@ -717,11 +710,6 @@ def get_for_stmt_nodes(self, node): decorator_list=[], returns=None, type_comment=None) - for name in loop_var_names: - if "." in name: - rename_transformer = RenameTransformer(body_func_node) - rename_transformer.rename( - name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(body_func_node) get_args_node = create_get_args_node(nonlocal_names) @@ -780,11 +768,6 @@ def get_while_stmt_nodes(self, node): returns=None, type_comment=None) - for name in loop_var_names: - if "." in name: - rename_transformer = RenameTransformer(condition_func_node) - rename_transformer.rename( - name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(condition_func_node) new_body = node.body @@ -807,11 +790,6 @@ def get_while_stmt_nodes(self, node): decorator_list=[], returns=None, type_comment=None) - for name in loop_var_names: - if "." 
in name: - rename_transformer = RenameTransformer(body_func_node) - rename_transformer.rename( - name, unique_name.generate(GENERATE_VARIABLE_PREFIX)) new_stmts.append(body_func_node) get_args_node = create_get_args_node(nonlocal_names) set_args_node = create_set_args_node(nonlocal_names) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index e1ae58569c53c..b454ef19a2feb 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -412,10 +412,16 @@ def generate_name_node(name_ids, ctx=gast.Load(), gen_tuple_if_single=False): raise TypeError( 'name_ids must be list or tuple or set, but received %s' % type(type(name_ids))) - gast_names = [ - gast.Name(id=name_id, ctx=ctx, annotation=None, type_comment=None) - for name_id in name_ids - ] + + def create_node_for_name(name): + if '.' not in name: + return gast.Name(id=name, + ctx=ctx, + annotation=None, + type_comment=None) + return gast.parse(name).body[0].value + + gast_names = [create_node_for_name(name_id) for name_id in name_ids] if len(gast_names) == 1 and not gen_tuple_if_single: name_node = gast_names[0] else: diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index 83bae73dc48ab..0c83c7a22efe1 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -90,11 +90,7 @@ def create_undefined_var(name): def create_nonlocal_stmt_node(names): assert isinstance(names, (list, tuple)) - def remove_attribute(x): - if '.' in x: return x.split('.')[0] - else: return x - - mapped = list(map(remove_attribute, names)) + mapped = list(filter(lambda n: '.' not in n, names)) names = sorted( mapped, key=mapped.index) # to keep the order, we can't use set() to unique @@ -176,18 +172,14 @@ def {func_name}(): if not names: return empty_node() - def remove_attribute(x): - if '.' in x: return x.split('.')[0] - else: return x - - mapped = list(map(remove_attribute, names)) + mapped = list(filter(lambda n: '.' not in n, names)) nonlocal_names = sorted( mapped, key=mapped.index) # to keep the order, we can't use set() to unique template = """ def {func_name}(): nonlocal {nonlocal_vars} - return {vars}, + return {vars} """ func_def = template.format( func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX), @@ -222,18 +214,14 @@ def {func_name}({args}): if not names: return empty_node() - def remove_attribute(x): - if '.' in x: return x.split('.')[0] - else: return x - - mapped = list(map(remove_attribute, names)) + mapped = list(filter(lambda n: '.' 
not in n, names)) nonlocal_names = sorted( mapped, key=mapped.index) # to keep the order, we can't use set() to unique template = """ def {func_name}({args}): nonlocal {nonlocal_vars} - {vars}, = {args} + {vars} = {args} """ func_def = template.format( func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), From 9e2a2fdbb5510d719c71e52ce917041dedd7cae6 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Wed, 29 Jun 2022 09:51:11 +0000 Subject: [PATCH 5/9] fix --- .../paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 306547ba47a8b..99f50d7238772 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -29,6 +29,7 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import ForNodeVisitor from paddle.fluid.dygraph.dygraph_to_static.utils import RenameTransformer from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_undefined_var +from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node from paddle.fluid.dygraph.dygraph_to_static.utils import create_nonlocal_stmt_node, create_get_args_node, create_set_args_node __all__ = ['LoopTransformer', 'NameVisitor'] From 786eff14f3079cc9bdc916f4585aefa712a0c0a8 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Wed, 29 Jun 2022 11:15:34 +0000 Subject: [PATCH 6/9] fix code --- .../dygraph_to_static/convert_operators.py | 4 +- .../dygraph_to_static/ifelse_transformer.py | 10 ++-- .../dygraph_to_static/loop_transformer.py | 3 +- .../dygraph_to_static/variable_trans_func.py | 48 +------------------ 4 files changed, 9 insertions(+), 56 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index f5663ea10e818..a6cab0db51380 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -41,14 +41,14 @@ def convert_while_loop(cond, body, getter, setter): # If loop_vars is changed during cond callable, then it causes bug, but current logical_and/logical_not/... doesn't change the loop_vars. pred = cond() if isinstance(pred, Variable): - loop_vars = _run_paddle_while_loop(cond, body, getter, setter) + loop_vars = _run_paddle_while(cond, body, getter, setter) else: loop_vars = _run_py_while(cond, body, getter, setter) return loop_vars -def _run_paddle_while_loop(cond, body, getter, setter): +def _run_paddle_while(cond, body, getter, setter): # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors. 
def to_list(x): if isinstance(x, (tuple, list)): return x diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py index 58fb00680963d..d4449f6dfc24e 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py @@ -416,22 +416,22 @@ def _vars_loaded(ids_dict): # modified vars body_modified_vars = _modified_vars(if_vars_dict, parent_vars_dict) - body_modified_vars = set(filter(lambda x: x != "__args", - body_modified_vars)) + body_modified_vars = set( + filter(lambda x: x != ARGS_NAME, body_modified_vars)) orelse_modified_vars = _modified_vars(else_vars_dict, parent_vars_dict) orelse_modified_vars = set( - filter(lambda x: x != "__args", orelse_modified_vars)) + filter(lambda x: x != ARGS_NAME, orelse_modified_vars)) modified_vars = body_modified_vars | orelse_modified_vars # new vars # TODO(remove __args when new FunctionScopeAnalysis has been used.) body_new_vars = set([ var for var in _vars_with_store(if_vars_dict) - if var not in parent_vars_dict and var != "__args" + if var not in parent_vars_dict and var != ARGS_NAME ]) orelse_new_vars = set([ var for var in _vars_with_store(else_vars_dict) - if var not in parent_vars_dict and var != "__args" + if var not in parent_vars_dict and var != ARGS_NAME ]) new_vars_in_body_or_orelse = body_new_vars | orelse_new_vars new_vars_in_one_of_body_or_orelse = body_new_vars ^ orelse_new_vars diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 99f50d7238772..63fc4f0489acb 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -31,6 +31,7 @@ from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_undefined_var from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node from paddle.fluid.dygraph.dygraph_to_static.utils import create_nonlocal_stmt_node, create_get_args_node, create_set_args_node +from paddle.fluid.dygraph.dygraph_to_static.ifelse_transformer import ARGS_NAME __all__ = ['LoopTransformer', 'NameVisitor'] @@ -40,8 +41,6 @@ FOR_CONDITION_PREFIX = 'for_loop_condition' FOR_BODY_PREFIX = 'for_loop_body' -ARGS_NAME = '__args' - def create_while_nodes(condition_name, body_name, loop_var_names, getter_name, setter_name): diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index 5c6b716cb09a6..3209a67111c2f 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -22,62 +22,16 @@ from paddle.fluid import unique_name from paddle.fluid.framework import Variable from paddle.fluid.layer_helper import LayerHelper -from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar +from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar, data_layer_not_check __all__ = [ 'create_bool_as_type', 'create_fill_constant_node', 'to_static_variable', 'create_undefined_var', - 'data_layer_not_check', ] -def data_layer_not_check(name, shape, dtype='float32', lod_level=0): - """ - This function creates a Tensor on the global block. 
The created Tensor - doesn't check the dtype and the shape of feed data because dygraph input - data can be various-length. This API is used in translating dygraph into - static graph. - - Note: - The default :code:`stop_gradient` attribute of the Tensor created by - this API is true, which means the gradient won't be passed backward - through the data Tensor. Set :code:`var.stop_gradient = False` If - user would like to pass backward gradient. - - Args: - name (str): The name/alias of the Tensor, see :ref:`api_guide_Name` - for more details. - shape (list|tuple): List|Tuple of integers declaring the shape. You can - set "None" at a dimension to indicate the dimension can be of any - size. For example, it is useful to set changeable batch size as "None" - dtype (np.dtype|VarType|str, optional): The type of the data. Supported - dtype: bool, float16, float32, float64, int8, int16, int32, int64, - uint8. Default: float32 - lod_level (int, optional): The LoD level of the LoDTensor. Usually users - don't have to set this value. For more details about when and how to - use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0 - - Returns: - Tensor: The global Tensor that gives access to the data. - """ - helper = LayerHelper('data', **locals()) - shape = list(shape) - for i in six.moves.range(len(shape)): - if shape[i] is None: - shape[i] = -1 - - return helper.create_variable(name=name, - shape=shape, - dtype=dtype, - type=core.VarDesc.VarType.LOD_TENSOR, - stop_gradient=True, - lod_level=lod_level, - is_data=True, - need_check_feed=False) - - def create_undefined_var(name): func_code = "{} = _jst.UndefinedVar('{}')".format(name, name) return gast.parse(func_code).body[0] From 2572e48e2b7fced2527c07a7981593700657d675 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Wed, 29 Jun 2022 12:30:39 +0000 Subject: [PATCH 7/9] fix --- .../fluid/dygraph/dygraph_to_static/utils.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index 71b5b4ec89739..fa99c749e68f2 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -59,6 +59,51 @@ def visit(self, node): return ret +def data_layer_not_check(name, shape, dtype='float32', lod_level=0): + """ + This function creates a Tensor on the global block. The created Tensor + doesn't check the dtype and the shape of feed data because dygraph input + data can be various-length. This API is used in translating dygraph into + static graph. + + Note: + The default :code:`stop_gradient` attribute of the Tensor created by + this API is true, which means the gradient won't be passed backward + through the data Tensor. Set :code:`var.stop_gradient = False` If + user would like to pass backward gradient. + + Args: + name (str): The name/alias of the Tensor, see :ref:`api_guide_Name` + for more details. + shape (list|tuple): List|Tuple of integers declaring the shape. You can + set "None" at a dimension to indicate the dimension can be of any + size. For example, it is useful to set changeable batch size as "None" + dtype (np.dtype|VarType|str, optional): The type of the data. Supported + dtype: bool, float16, float32, float64, int8, int16, int32, int64, + uint8. Default: float32 + lod_level (int, optional): The LoD level of the LoDTensor. Usually users + don't have to set this value. 
For more details about when and how to + use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0 + + Returns: + Tensor: The global Tensor that gives access to the data. + """ + helper = LayerHelper('data', **locals()) + shape = list(shape) + for i in six.moves.range(len(shape)): + if shape[i] is None: + shape[i] = -1 + + return helper.create_variable(name=name, + shape=shape, + dtype=dtype, + type=core.VarDesc.VarType.LOD_TENSOR, + stop_gradient=True, + lod_level=lod_level, + is_data=True, + need_check_feed=False) + + # imp is deprecated in python3 from importlib.machinery import SourceFileLoader From 275933b781555491c9970ca426eb367a62a44767 Mon Sep 17 00:00:00 2001 From: xiongkun Date: Wed, 29 Jun 2022 13:10:33 +0000 Subject: [PATCH 8/9] fix --- python/paddle/fluid/dygraph/dygraph_to_static/utils.py | 1 + .../fluid/dygraph/dygraph_to_static/variable_trans_func.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index fa99c749e68f2..39d4192cabaf7 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -30,6 +30,7 @@ import paddle from paddle.fluid import unique_name from paddle.fluid.data_feeder import convert_dtype +from paddle.fluid.layer_helper import LayerHelper # Note(Aurelius): Do not forget the dot `.` to distinguish other # module such as paddlenlp. diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index 3209a67111c2f..72738d3171883 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -21,7 +21,6 @@ from paddle.fluid import core from paddle.fluid import unique_name from paddle.fluid.framework import Variable -from paddle.fluid.layer_helper import LayerHelper from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar, data_layer_not_check __all__ = [ From 6645b4ca595cf01887467f897623b162fbb3b5ea Mon Sep 17 00:00:00 2001 From: xiongkun Date: Wed, 29 Jun 2022 14:30:38 +0000 Subject: [PATCH 9/9] fix --- python/paddle/fluid/dygraph/dygraph_to_static/utils.py | 1 + .../fluid/dygraph/dygraph_to_static/variable_trans_func.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index 39d4192cabaf7..466e9ee4d34c1 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -31,6 +31,7 @@ from paddle.fluid import unique_name from paddle.fluid.data_feeder import convert_dtype from paddle.fluid.layer_helper import LayerHelper +from paddle.fluid import core # Note(Aurelius): Do not forget the dot `.` to distinguish other # module such as paddlenlp. 
diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index 72738d3171883..9bbce59fc54ce 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -18,7 +18,6 @@ import paddle import textwrap from paddle.utils import gast -from paddle.fluid import core from paddle.fluid import unique_name from paddle.fluid.framework import Variable from paddle.fluid.dygraph.dygraph_to_static.utils import UndefinedVar, data_layer_not_check
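
For reference, the overall effect of this series is that loop variables are no longer threaded through the generated condition/body functions as positional arguments; instead they live in the enclosing scope and are read and written through nonlocal closures plus a get_args/set_args pair, which _jst.convert_while_loop receives as getter and setter. Below is a minimal pure-Python sketch of that protocol. It is illustrative only: run_py_while stands in for the Python branch (_run_py_while) of the real driver, and names such as while_condition_0, while_body_0, get_args_0 and set_args_0 mimic but are not Paddle's exact generated identifiers.

# Minimal, pure-Python sketch of the getter/setter loop protocol introduced by
# these patches. Not Paddle's implementation; names are illustrative.

def run_py_while(cond, body, getter, setter):
    # Loop state lives in the caller's scope; cond/body read and write it
    # through nonlocal closures instead of receiving it as arguments.
    # setter is unused in this Python branch; the static-graph branch
    # (_run_paddle_while) uses it to write control_flow.while_loop's outputs
    # back into the enclosing scope.
    while cond():
        body()
    return getter()


def fn(x):
    i = 0

    def while_condition_0(*__args):   # generated condition: only a *__args vararg
        nonlocal i, x
        return i < 10

    def while_body_0(*__args):        # generated body: mutates enclosing-scope vars
        nonlocal i, x
        x = x + i
        i += 1
        return i, x

    def get_args_0():                 # generated getter: snapshot of loop vars
        nonlocal i, x
        return i, x

    def set_args_0(__args):           # generated setter: write loop vars back
        nonlocal i, x
        i, x = __args

    run_py_while(while_condition_0, while_body_0, get_args_0, set_args_0)
    return x


print(fn(0))  # prints 45

One detail the sketch glosses over: names that contain an attribute access such as foo.x cannot appear in a nonlocal statement (that is a SyntaxError), so create_nonlocal_stmt_node, create_get_args_node and create_set_args_node filter dotted names out of the nonlocal declarations while generate_name_node still emits them in the returned and assigned value lists.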