From 93888b5b03c978b1337f66eef3d7071e82b29c4e Mon Sep 17 00:00:00 2001
From: Kaedeharai
Date: Thu, 21 Sep 2023 11:07:56 +0800
Subject: [PATCH] delete UP031

---
 pyproject.toml                                  |  1 -
 python/paddle/base/backward.py                  |  5 +-
 python/paddle/base/core.py                      |  9 ++--
 python/paddle/base/data_feeder.py               | 12 ++---
 python/paddle/base/dygraph/base.py              |  3 +-
 python/paddle/base/executor.py                  | 12 ++---
 python/paddle/base/framework.py                 | 50 ++++++++-----------
 .../base/layers/layer_function_generator.py     |  3 +-
 python/paddle/base/layers/math_op_patch.py      |  5 +-
 python/paddle/base/trainer_desc.py              |  8 +--
 10 files changed, 43 insertions(+), 65 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index e11ab2108c2be..4178210785276 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -105,7 +105,6 @@ ignore = [
 # Temporarily ignored
 "python/paddle/base/**" = [
     "UP032",
-    "UP031",
     "C408",
     "UP030",
     "F522",
diff --git a/python/paddle/base/backward.py b/python/paddle/base/backward.py
index 563e423e0c7ea..5b36cb8368408 100755
--- a/python/paddle/base/backward.py
+++ b/python/paddle/base/backward.py
@@ -237,7 +237,7 @@ def modify_forward_desc_for_recompute(self):
 
 
 def _pretty_op_desc_(op_desc, prefix):
-    out_s = "%s\tname:[%s]\n%s \tinputs:[%s]\n%s \toutputs:[%s]" % (
+    out_s = "{}\tname:[{}]\n{} \tinputs:[{}]\n{} \toutputs:[{}]".format(
         prefix + "_op",
         str(op_desc.type()),
         prefix + "_input",
@@ -2461,8 +2461,7 @@ def calc_gradient_helper(
                 raise ValueError("all targets must be in the same block")
             if target.shape != grad.shape:
                 raise ValueError(
-                    "The shapes of target and grad are different: %s %s"
-                    % (target.name, grad.name)
+                    "The shapes of target and grad are different: {} {}".format(target.name, grad.name)
                 )
             target_grad_map[_append_grad_suffix_(target.name)] = grad.name
             input_grad_names_set.add(grad.name)
diff --git a/python/paddle/base/core.py b/python/paddle/base/core.py
index df90a6ace8582..7dfcaff2222c0 100644
--- a/python/paddle/base/core.py
+++ b/python/paddle/base/core.py
@@ -47,11 +47,10 @@
     if os.name == 'nt':
         executable_path = os.path.abspath(os.path.dirname(sys.executable))
         raise ImportError(
-            """NOTE: You may need to run \"set PATH=%s;%%PATH%%\"
+            """NOTE: You may need to run \"set PATH={};%PATH%\"
             if you encounters \"DLL load failed\" errors. If you have python
-            installed in other directory, replace \"%s\" with your own
-            directory. The original error is: \n %s"""
-            % (executable_path, executable_path, str(e))
+            installed in another directory, replace \"{}\" with your own
+            directory. The original error is: \n {}""".format(executable_path, executable_path, str(e))
         )
     else:
         raise ImportError(
@@ -197,7 +196,7 @@ def run_shell_command(cmd):
 def get_dso_path(core_so, dso_name):
     if core_so and dso_name:
         return run_shell_command(
-            "ldd %s|grep %s|awk '{print $3}'" % (core_so, dso_name)
+            "ldd {}|grep {}|awk '{{print $3}}'".format(core_so, dso_name)
         )
     else:
         return None
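Note: the two core.py hunks above are the trickiest conversions in this patch, because the two formatting systems escape different characters: %-formatting writes a literal percent as "%%", while str.format() leaves "%" alone but requires literal braces to be doubled. A standalone sketch with illustrative values (not Paddle code):

    # %-formatting escapes a literal percent as '%%'; str.format() leaves '%' as-is.
    old = "set PATH=%s;%%PATH%%" % "C:\\Python"
    new = "set PATH={};%PATH%".format("C:\\Python")
    assert old == new == "set PATH=C:\\Python;%PATH%"

    # str.format() treats braces as placeholders, so awk's own braces must be
    # doubled once the string becomes a format string.
    old_cmd = "ldd %s|grep %s|awk '{print $3}'" % ("core.so", "libcudart")
    new_cmd = "ldd {}|grep {}|awk '{{print $3}}'".format("core.so", "libcudart")
    assert old_cmd == new_cmd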
{}".format(input_name, op_name, expected_type, type(input), extra_message) ) @@ -196,8 +195,7 @@ def check_dtype( return if convert_dtype(input_dtype) in ['float16']: warnings.warn( - "The data type of '%s' in %s only support float16 in GPU now. %s" - % (input_name, op_name, extra_message) + "The data type of '{}' in {} only support float16 in GPU now. {}".format(input_name, op_name, extra_message) ) if convert_dtype(input_dtype) in ['uint16'] and op_name not in [ 'reshape', @@ -205,13 +203,11 @@ def check_dtype( 'scale', ]: warnings.warn( - "The data type of '%s' in %s only support bfloat16 in OneDNN now. %s" - % (input_name, op_name, extra_message) + "The data type of '{}' in {} only support bfloat16 in OneDNN now. {}".format(input_name, op_name, extra_message) ) if convert_dtype(input_dtype) not in expected_dtype: raise TypeError( - "The data type of '%s' in %s must be %s, but received %s. %s" - % ( + "The data type of '{}' in {} must be {}, but received {}. {}".format( input_name, op_name, expected_dtype, diff --git a/python/paddle/base/dygraph/base.py b/python/paddle/base/dygraph/base.py index 7edb748026d84..449538ce80c80 100644 --- a/python/paddle/base/dygraph/base.py +++ b/python/paddle/base/dygraph/base.py @@ -891,8 +891,7 @@ def to_variable(value, name=None, zero_copy=None, dtype=None): ) if not isinstance(value, support_type): raise TypeError( - "The type of 'value' in base.dygraph.to_variable must be %s, but received %s." - % (support_type, type(value)) + "The type of 'value' in base.dygraph.to_variable must be {}, but received {}.".format(support_type, type(value)) ) if isinstance(value, (core.eager.Tensor, framework.Variable)): return value diff --git a/python/paddle/base/executor.py b/python/paddle/base/executor.py index 9ea3d566c824a..31dc5b5b1a420 100755 --- a/python/paddle/base/executor.py +++ b/python/paddle/base/executor.py @@ -261,8 +261,7 @@ def check_feed_shape_type(var, feed, num_places=1): else feed._dtype() ) raise ValueError( - 'The data type of fed Variable %r must be %r, but received %r' - % (var.name, var_dtype_format, feed_dtype_format) + 'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(var.name, var_dtype_format, feed_dtype_format) ) return True @@ -310,8 +309,7 @@ def pir_check_feed_shape_type(feed, name, target_shape, dtype, num_places=1): else feed._dtype() ) raise ValueError( - 'The data type of fed Variable %r must be %r, but received %r' - % (name, var_dtype_format, feed_dtype_format) + 'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(name, var_dtype_format, feed_dtype_format) ) return True @@ -496,7 +494,7 @@ def _add_feed_fetch_ops( for i, var in enumerate(fetch_list): assert isinstance( var, (Variable, str) - ), "Wrong type for fetch_list[%s]: %s" % (i, type(var)) + ), "Wrong type for fetch_list[{}]: {}".format(i, type(var)) global_block.append_op( type=fetch_op, inputs={'X': [var]}, @@ -518,7 +516,7 @@ def _add_pir_fetch_ops(program, fetch_list, fetch_var_name): for i, fetch_input in enumerate(fetch_list): assert isinstance( fetch_input, OpResult - ), "Wrong type for fetch_list[%s]: %s" % (i, type(fetch_input)) + ), "Wrong type for fetch_list[{}]: {}".format(i, type(fetch_input)) paddle._ir_ops.fetch(fetch_input, fetch_var_name + str(i), i) @@ -2802,7 +2800,7 @@ def _add_fetch_ops( for i, var in enumerate(fetch_list): assert isinstance( var, (Variable, str) - ), "Wrong type for fetch_list[%s]: %s" % (i, type(var)) + ), "Wrong type for fetch_list[{}]: {}".format(i, type(var)) 
diff --git a/python/paddle/base/executor.py b/python/paddle/base/executor.py
index 9ea3d566c824a..31dc5b5b1a420 100755
--- a/python/paddle/base/executor.py
+++ b/python/paddle/base/executor.py
@@ -261,8 +261,7 @@ def check_feed_shape_type(var, feed, num_places=1):
             else feed._dtype()
         )
         raise ValueError(
-            'The data type of fed Variable %r must be %r, but received %r'
-            % (var.name, var_dtype_format, feed_dtype_format)
+            'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(var.name, var_dtype_format, feed_dtype_format)
         )
     return True
 
@@ -310,8 +309,7 @@ def pir_check_feed_shape_type(feed, name, target_shape, dtype, num_places=1):
             else feed._dtype()
         )
         raise ValueError(
-            'The data type of fed Variable %r must be %r, but received %r'
-            % (name, var_dtype_format, feed_dtype_format)
+            'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(name, var_dtype_format, feed_dtype_format)
         )
     return True
 
@@ -496,7 +494,7 @@ def _add_feed_fetch_ops(
     for i, var in enumerate(fetch_list):
         assert isinstance(
             var, (Variable, str)
-        ), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
+        ), "Wrong type for fetch_list[{}]: {}".format(i, type(var))
         global_block.append_op(
             type=fetch_op,
             inputs={'X': [var]},
@@ -518,7 +516,7 @@ def _add_pir_fetch_ops(program, fetch_list, fetch_var_name):
     for i, fetch_input in enumerate(fetch_list):
         assert isinstance(
             fetch_input, OpResult
-        ), "Wrong type for fetch_list[%s]: %s" % (i, type(fetch_input))
+        ), "Wrong type for fetch_list[{}]: {}".format(i, type(fetch_input))
         paddle._ir_ops.fetch(fetch_input, fetch_var_name + str(i), i)
 
 
@@ -2802,7 +2800,7 @@ def _add_fetch_ops(
     for i, var in enumerate(fetch_list):
         assert isinstance(
             var, (Variable, str)
-        ), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
+        ), "Wrong type for fetch_list[{}]: {}".format(i, type(var))
         global_block.append_op(
             type=fetch_op,
             inputs={'X': [var]},
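Note: the executor.py messages use %r, which has a direct equivalent in the {!r} conversion flag: both render the argument with repr(), so the quoting around fed variable names is unchanged by this rewrite. A quick standalone check:

    # '%r' and '{!r}' both call repr() on the argument.
    name = "image"
    assert "fed Variable %r" % name == "fed Variable {!r}".format(name)
    # Both render as: fed Variable 'image'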
diff --git a/python/paddle/base/framework.py b/python/paddle/base/framework.py
index 0440af415a7d0..d284a50909fe8 100644
--- a/python/paddle/base/framework.py
+++ b/python/paddle/base/framework.py
@@ -546,17 +546,15 @@ def version_cmp(ver_a, ver_b):
     if version_cmp(version_installed, zero_version) == 0:
         if max_version is not None:
             warnings.warn(
-                "PaddlePaddle version in [%s, %s] required, but %s installed. "
+                "PaddlePaddle version in [{}, {}] required, but {} installed. "
                 "Maybe you are using a develop version, "
-                "please make sure the version is good with your code."
-                % (min_version, max_version, fluid_version.full_version)
+                "please make sure the version is compatible with your code.".format(min_version, max_version, fluid_version.full_version)
             )
         else:
             warnings.warn(
-                "PaddlePaddle version %s or higher is required, but %s installed, "
+                "PaddlePaddle version {} or higher is required, but {} installed, "
                 "Maybe you are using a develop version, "
-                "please make sure the version is good with your code."
-                % (min_version, fluid_version.full_version)
+                "please make sure the version is compatible with your code.".format(min_version, fluid_version.full_version)
             )
         return
 
@@ -576,15 +574,13 @@ def version_cmp(ver_a, ver_b):
         or version_cmp(version_installed, min_version_to_check) < 0
     ):
         raise Exception(
-            "VersionError: PaddlePaddle version in [%s, %s] required, but %s installed."
-            % (min_version, max_version, fluid_version.full_version)
+            "VersionError: PaddlePaddle version in [{}, {}] required, but {} installed.".format(min_version, max_version, fluid_version.full_version)
         )
     else:
         if version_cmp(version_installed, min_version_to_check) < 0:
             raise Exception(
-                "VersionError: PaddlePaddle version %s or higher is required, but %s installed, "
-                "please upgrade your PaddlePaddle to %s or other higher version."
-                % (min_version, fluid_version.full_version, min_version)
+                "VersionError: PaddlePaddle version {} or higher is required, but {} installed, "
+                "please upgrade your PaddlePaddle to {} or a higher version.".format(min_version, fluid_version.full_version, min_version)
             )
 
 
@@ -648,11 +644,10 @@ def _set_pipeline_stage(stage):
 def _fake_interface_only_(func):
     def __impl__(*args, **kwargs):
         raise AssertionError(
-            "'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
+            "'{}' can only be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
             "  1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
             "  2. If you are using `@paddle.jit.to_static`, you can call `paddle.jit.enable_to_static(False)`. "
-            "If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
-            % (func.__name__, func.__name__)
+            "If you have to translate dynamic graph to static graph, please use another API to replace '{}'.".format(func.__name__, func.__name__)
         )
 
     return __impl__
@@ -1910,7 +1905,7 @@ def to_string(self, throw_on_error, with_details=False):
         if with_details:
             additional_attr = ("error_clip",)
             for attr_name in additional_attr:
-                res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
+                res_str += "{}: {}\n".format(attr_name, getattr(self, attr_name))
 
         return res_str
 
@@ -3082,18 +3077,16 @@ def find_name(var_list, name):
                     raise ValueError(
                         (
                             "Incorrect setting for output(s) of "
-                            "operator \"%s\", should set: [%s]."
-                        )
-                        % (type, m.name)
+                            "operator \"{}\", should set: [{}]."
+                        ).format(type, m.name)
                     )
             else:
                 if not ((m.name in outputs) or m.dispensable):
                     raise ValueError(
                         (
                             "Incorrect setting for output(s) of "
-                            "operator \"%s\", should set: [%s]."
-                        )
-                        % (type, m.name)
+                            "operator \"{}\", should set: [{}]."
+                        ).format(type, m.name)
                     )
 
         for out_proto in proto.outputs:
@@ -3136,7 +3129,7 @@ def find_name(var_list, name):
                 for attr_name in extra_attrs_map.keys():
                     if os.environ.get('FLAGS_print_extra_attrs', '0') == '1':
                         warnings.warn(
-                            "op %s use extra_attr: %s" % (type, attr_name)
+                            "op {} uses extra_attr: {}".format(type, attr_name)
                         )
 
                 if (attr_name not in op_attrs) or (
@@ -3154,7 +3147,7 @@ def find_name(var_list, name):
                 for attr in attrs:
                     if attr in op_attrs.keys():
                         warnings.warn(
-                            "op %s use extra_attr: %s" % (type, attr)
+                            "op {} uses extra_attr: {}".format(type, attr)
                         )
 
                 if type in special_op_attrs:
@@ -3167,8 +3160,7 @@ def find_name(var_list, name):
                         and default_value != op_attrs[a_name]
                     ):
                         warnings.warn(
-                            "op %s's attr %s = %s is not the default value: %s"
-                            % (
+                            "op {}'s attr {} = {} is not the default value: {}".format(
                                 type,
                                 a_name,
                                 op_attrs[a_name],
@@ -3757,8 +3749,7 @@ def check_if_to_static_diff_with_dygraph(op_type, inplace_map, outputs):
                 and inplace_map.get("Input", None) == "Out"
             ):
                 raise ValueError(
-                    'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
-                    % (op_type, k)
+                    'Sorry about what\'s happened. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(op_type, k)
                 )
         elif isinstance(v, list):
             for var in v:
                 if (
                     var.is_view_var
                     and inplace_map.get("Input", None) == "Out"
                 ):
                     raise ValueError(
-                        'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
-                        % (op_type, k)
+                        'Sorry about what\'s happened. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(op_type, k)
                     )
@@ -7361,7 +7351,7 @@ def to_string(self, throw_on_error, with_details=False):
                 "need_clip",
             )
             for attr_name in additional_attr:
-                res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
+                res_str += "{}: {}\n".format(attr_name, getattr(self, attr_name))
         else:
             res_str = Variable.to_string(self, throw_on_error, False)
         return res_str
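Note: several framework.py messages are built from adjacent string literals. Implicit concatenation is resolved at compile time, so a .format() call attached to the last fragment formats the whole message, just as the old trailing "% (...)" applied to the whole concatenated string. A standalone sketch with dummy version strings:

    # Adjacent literals fuse into one string before .format() is applied.
    msg = (
        "PaddlePaddle version in [{}, {}] required, but {} installed. "
        "Maybe you are using a develop version, "
        "please make sure the version is compatible with your code.".format(
            "2.4.0", "2.5.0", "0.0.0"
        )
    )
    assert msg.startswith("PaddlePaddle version in [2.4.0, 2.5.0]")
    assert "0.0.0 installed" in msg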
diff --git a/python/paddle/base/layers/layer_function_generator.py b/python/paddle/base/layers/layer_function_generator.py
index 1b1b85d00ea42..674a070afee97 100644
--- a/python/paddle/base/layers/layer_function_generator.py
+++ b/python/paddle/base/layers/layer_function_generator.py
@@ -336,8 +336,7 @@ def func(x, name=None):
             and x.is_view_var
         ):
             raise ValueError(
-                'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call %s = %s.assign().'
-                % (inplace_op_type, x.name, x.name, x.nameb)
+                'Sorry about what\'s happened. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You must find the location where the strided API is called, and call {} = {}.assign().'.format(inplace_op_type, x.name, x.name, x.name)
             )
         return generate_activation_fn(origin_op_type)(x, name)
diff --git a/python/paddle/base/layers/math_op_patch.py b/python/paddle/base/layers/math_op_patch.py
index 06f384eae23d1..27d17076191a4 100644
--- a/python/paddle/base/layers/math_op_patch.py
+++ b/python/paddle/base/layers/math_op_patch.py
@@ -553,10 +553,9 @@ def __impl__(self, other_var):
             file_name = stack[1]
             line_num = stack[2]
             warnings.warn(
-                "%s:%s\nThe behavior of expression %s has been unified with %s(X, Y, axis=-1) from Paddle 2.0. "
+                "{}:{}\nThe behavior of expression {} has been unified with {}(X, Y, axis=-1) from Paddle 2.0. "
                 "If your code works well in the older versions but crashes in this version, try to use "
-                "%s(X, Y, axis=0) instead of %s. This transitional warning will be dropped in the future."
-                % (
+                "{}(X, Y, axis=0) instead of {}. This transitional warning will be dropped in the future.".format(
                     file_name,
                     line_num,
                     EXPRESSION_MAP[method_name],
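Note: UP032 remains in the pyproject.toml ignore list above, so this patch deliberately stops at .format() rather than f-strings. For call sites like the math_op_patch warning, where every argument is a simple name, the eventual UP032 cleanup would look roughly like this sketch with placeholder values:

    # Sketch only: the same message shape as an f-string (UP032 territory,
    # intentionally out of scope while UP032 stays ignored).
    file_name, line_num, expr, api = "model.py", 42, "a + b", "paddle.add"
    warning = (
        f"{file_name}:{line_num}\nThe behavior of expression {expr} has been "
        f"unified with {api}(X, Y, axis=-1) from Paddle 2.0."
    )
    assert warning.startswith("model.py:42")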
diff --git a/python/paddle/base/trainer_desc.py b/python/paddle/base/trainer_desc.py
index 48cc427ac8e7e..1d6d518d5b99b 100644
--- a/python/paddle/base/trainer_desc.py
+++ b/python/paddle/base/trainer_desc.py
@@ -244,7 +244,7 @@ def _set_copy_table_config(self, config_dict):
         if len(src_sparse_tables) != len(dest_sparse_tables):
             raise ValueError(
                 "len(src_sparse_tables) != len(dest_sparse_tables),"
-                " %s vs %s" % (len(src_sparse_tables), len(dest_sparse_tables))
+                " {} vs {}".format(len(src_sparse_tables), len(dest_sparse_tables))
             )
         for i in src_sparse_tables:
             config.src_sparse_tables.append(i)
@@ -260,7 +260,7 @@ def _set_copy_table_config(self, config_dict):
         if len(src_dense_tables) != len(dest_dense_tables):
             raise ValueError(
                 "len(src_dense_tables) != len(dest_dense_tables),"
-                " %s vs %s" % (len(src_dense_tables), len(dest_dense_tables))
+                " {} vs {}".format(len(src_dense_tables), len(dest_dense_tables))
             )
         for i in src_dense_tables:
             config.src_dense_tables.append(i)
@@ -277,8 +277,8 @@ def _set_copy_table_config(self, config_dict):
             dest_var_list = [dest_var_list]
         if len(src_var_list) != len(dest_var_list):
             raise ValueError(
-                "len(src_var_list) != len(dest_var_list), %s vs"
-                " %s" % (len(src_var_list), len(dest_var_list))
+                "len(src_var_list) != len(dest_var_list), {} vs"
+                " {}".format(len(src_var_list), len(dest_var_list))
             )
         for i in src_var_list:
             config.src_var_list.append(i)
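Note: assuming ruff is installed and configured as in pyproject.toml, the absence of remaining UP031 sites can be double-checked from the repository root. A minimal sketch:

    # Minimal sketch (assumes the ruff CLI is installed): an empty report
    # means no UP031 violations remain under python/paddle/base.
    import subprocess

    result = subprocess.run(
        ["ruff", "check", "--select", "UP031", "python/paddle/base"],
        capture_output=True,
        text=True,
    )
    print(result.stdout or "no UP031 violations found")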