Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[CodeStyle][task 7] enable Ruff UP031 rule in python/paddle/base #57571

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,6 @@ ignore = [
# Temporarily ignored
"python/paddle/base/**" = [
"UP032",
"UP031",
"C408",
"UP030",
"F522",
Expand Down
5 changes: 2 additions & 3 deletions python/paddle/base/backward.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ def modify_forward_desc_for_recompute(self):


def _pretty_op_desc_(op_desc, prefix):
out_s = "%s\tname:[%s]\n%s \tinputs:[%s]\n%s \toutputs:[%s]" % (
out_s = "{}\tname:[{}]\n{} \tinputs:[{}]\n{} \toutputs:[{}]".format(
prefix + "_op",
str(op_desc.type()),
prefix + "_input",
Expand Down Expand Up @@ -2461,8 +2461,7 @@ def calc_gradient_helper(
raise ValueError("all targets must be in the same block")
if target.shape != grad.shape:
raise ValueError(
"The shapes of target and grad are different: %s %s"
% (target.name, grad.name)
"The shapes of target and grad are different: {} {}".format(target.name, grad.name)
)
target_grad_map[_append_grad_suffix_(target.name)] = grad.name
input_grad_names_set.add(grad.name)
Expand Down
9 changes: 4 additions & 5 deletions python/paddle/base/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,10 @@
if os.name == 'nt':
executable_path = os.path.abspath(os.path.dirname(sys.executable))
raise ImportError(
"""NOTE: You may need to run \"set PATH=%s;%%PATH%%\"
"""NOTE: You may need to run \"set PATH={};%PATH%\"
if you encounters \"DLL load failed\" errors. If you have python
installed in other directory, replace \"%s\" with your own
directory. The original error is: \n %s"""
% (executable_path, executable_path, str(e))
installed in other directory, replace \"{}\" with your own
directory. The original error is: \n {}""".format(executable_path, executable_path, str(e))
)
else:
raise ImportError(
Expand Down Expand Up @@ -197,7 +196,7 @@ def run_shell_command(cmd):
def get_dso_path(core_so, dso_name):
    """Look up the on-disk path of a shared library linked by ``core_so``.

    Runs ``ldd`` on *core_so* and greps its output for *dso_name*, returning
    the third whitespace-separated field (the resolved library path) as
    reported by ``awk``. Returns ``None`` when either argument is empty.
    """
    # Bail out early instead of building a shell pipeline with empty pieces.
    if not (core_so and dso_name):
        return None
    pipeline = "ldd {}|grep {}|awk '{{print $3}}'".format(core_so, dso_name)
    return run_shell_command(pipeline)
Expand Down
12 changes: 4 additions & 8 deletions python/paddle/base/data_feeder.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,8 +183,7 @@ def check_type(input, input_name, expected_type, op_name, extra_message=''):
)
if not isinstance(input, expected_type):
raise TypeError(
"The type of '%s' in %s must be %s, but received %s. %s"
% (input_name, op_name, expected_type, type(input), extra_message)
"The type of '{}' in {} must be {}, but received {}. {}".format(input_name, op_name, expected_type, type(input), extra_message)
)


Expand All @@ -196,22 +195,19 @@ def check_dtype(
return
if convert_dtype(input_dtype) in ['float16']:
warnings.warn(
"The data type of '%s' in %s only support float16 in GPU now. %s"
% (input_name, op_name, extra_message)
"The data type of '{}' in {} only support float16 in GPU now. {}".format(input_name, op_name, extra_message)
)
if convert_dtype(input_dtype) in ['uint16'] and op_name not in [
'reshape',
'lookup_table',
'scale',
]:
warnings.warn(
"The data type of '%s' in %s only support bfloat16 in OneDNN now. %s"
% (input_name, op_name, extra_message)
"The data type of '{}' in {} only support bfloat16 in OneDNN now. {}".format(input_name, op_name, extra_message)
)
if convert_dtype(input_dtype) not in expected_dtype:
raise TypeError(
"The data type of '%s' in %s must be %s, but received %s. %s"
% (
"The data type of '{}' in {} must be {}, but received {}. {}".format(
input_name,
op_name,
expected_dtype,
Expand Down
3 changes: 1 addition & 2 deletions python/paddle/base/dygraph/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -891,8 +891,7 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
)
if not isinstance(value, support_type):
raise TypeError(
"The type of 'value' in base.dygraph.to_variable must be %s, but received %s."
% (support_type, type(value))
"The type of 'value' in base.dygraph.to_variable must be {}, but received {}.".format(support_type, type(value))
)
if isinstance(value, (core.eager.Tensor, framework.Variable)):
return value
Expand Down
12 changes: 5 additions & 7 deletions python/paddle/base/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,8 +261,7 @@ def check_feed_shape_type(var, feed, num_places=1):
else feed._dtype()
)
raise ValueError(
'The data type of fed Variable %r must be %r, but received %r'
% (var.name, var_dtype_format, feed_dtype_format)
'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(var.name, var_dtype_format, feed_dtype_format)
)
return True

Expand Down Expand Up @@ -310,8 +309,7 @@ def pir_check_feed_shape_type(feed, name, target_shape, dtype, num_places=1):
else feed._dtype()
)
raise ValueError(
'The data type of fed Variable %r must be %r, but received %r'
% (name, var_dtype_format, feed_dtype_format)
'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(name, var_dtype_format, feed_dtype_format)
)
return True

Expand Down Expand Up @@ -496,7 +494,7 @@ def _add_feed_fetch_ops(
for i, var in enumerate(fetch_list):
assert isinstance(
var, (Variable, str)
), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
), "Wrong type for fetch_list[{}]: {}".format(i, type(var))
global_block.append_op(
type=fetch_op,
inputs={'X': [var]},
Expand All @@ -518,7 +516,7 @@ def _add_pir_fetch_ops(program, fetch_list, fetch_var_name):
for i, fetch_input in enumerate(fetch_list):
assert isinstance(
fetch_input, OpResult
), "Wrong type for fetch_list[%s]: %s" % (i, type(fetch_input))
), "Wrong type for fetch_list[{}]: {}".format(i, type(fetch_input))
paddle._ir_ops.fetch(fetch_input, fetch_var_name + str(i), i)


Expand Down Expand Up @@ -2802,7 +2800,7 @@ def _add_fetch_ops(
for i, var in enumerate(fetch_list):
assert isinstance(
var, (Variable, str)
), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
), "Wrong type for fetch_list[{}]: {}".format(i, type(var))
global_block.append_op(
type=fetch_op,
inputs={'X': [var]},
Expand Down
50 changes: 20 additions & 30 deletions python/paddle/base/framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -546,17 +546,15 @@ def version_cmp(ver_a, ver_b):
if version_cmp(version_installed, zero_version) == 0:
if max_version is not None:
warnings.warn(
"PaddlePaddle version in [%s, %s] required, but %s installed. "
"PaddlePaddle version in [{}, {}] required, but {} installed. "
"Maybe you are using a develop version, "
"please make sure the version is good with your code."
% (min_version, max_version, fluid_version.full_version)
"please make sure the version is good with your code.".format(min_version, max_version, fluid_version.full_version)
)
else:
warnings.warn(
"PaddlePaddle version %s or higher is required, but %s installed, "
"PaddlePaddle version {} or higher is required, but {} installed, "
"Maybe you are using a develop version, "
"please make sure the version is good with your code."
% (min_version, fluid_version.full_version)
"please make sure the version is good with your code.".format(min_version, fluid_version.full_version)
)
return

Expand All @@ -576,15 +574,13 @@ def version_cmp(ver_a, ver_b):
or version_cmp(version_installed, min_version_to_check) < 0
):
raise Exception(
"VersionError: PaddlePaddle version in [%s, %s] required, but %s installed."
% (min_version, max_version, fluid_version.full_version)
"VersionError: PaddlePaddle version in [{}, {}] required, but {} installed.".format(min_version, max_version, fluid_version.full_version)
)
else:
if version_cmp(version_installed, min_version_to_check) < 0:
raise Exception(
"VersionError: PaddlePaddle version %s or higher is required, but %s installed, "
"please upgrade your PaddlePaddle to %s or other higher version."
% (min_version, fluid_version.full_version, min_version)
"VersionError: PaddlePaddle version {} or higher is required, but {} installed, "
"please upgrade your PaddlePaddle to {} or other higher version.".format(min_version, fluid_version.full_version, min_version)
)


Expand Down Expand Up @@ -648,11 +644,10 @@ def _set_pipeline_stage(stage):
def _fake_interface_only_(func):
def __impl__(*args, **kwargs):
raise AssertionError(
"'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
"'{}' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
" 1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
" 2. If you are using `@paddle.jit.to_static`, you can call `paddle.jit.enable_to_static(False)`. "
"If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
% (func.__name__, func.__name__)
"If you have to translate dynamic graph to static graph, please use other API to replace '{}'.".format(func.__name__, func.__name__)
)

return __impl__
Expand Down Expand Up @@ -1910,7 +1905,7 @@ def to_string(self, throw_on_error, with_details=False):
if with_details:
additional_attr = ("error_clip",)
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
res_str += "{}: {}\n".format(attr_name, getattr(self, attr_name))

return res_str

Expand Down Expand Up @@ -3082,18 +3077,16 @@ def find_name(var_list, name):
raise ValueError(
(
"Incorrect setting for output(s) of "
"operator \"%s\", should set: [%s]."
)
% (type, m.name)
"operator \"{}\", should set: [{}]."
).format(type, m.name)
)
else:
if not ((m.name in outputs) or m.dispensable):
raise ValueError(
(
"Incorrect setting for output(s) of "
"operator \"%s\", should set: [%s]."
)
% (type, m.name)
"operator \"{}\", should set: [{}]."
).format(type, m.name)
)

for out_proto in proto.outputs:
Expand Down Expand Up @@ -3136,7 +3129,7 @@ def find_name(var_list, name):
for attr_name in extra_attrs_map.keys():
if os.environ.get('FLAGS_print_extra_attrs', '0') == '1':
warnings.warn(
"op %s use extra_attr: %s" % (type, attr_name)
"op {} use extra_attr: {}".format(type, attr_name)
)

if (attr_name not in op_attrs) or (
Expand All @@ -3154,7 +3147,7 @@ def find_name(var_list, name):
for attr in attrs:
if attr in op_attrs.keys():
warnings.warn(
"op %s use extra_attr: %s" % (type, attr)
"op {} use extra_attr: {}".format(type, attr)
)

if type in special_op_attrs:
Expand All @@ -3167,8 +3160,7 @@ def find_name(var_list, name):
and default_value != op_attrs[a_name]
):
warnings.warn(
"op %s's attr %s = %s is not the default value: %s"
% (
"op {}'s attr {} = {} is not the default value: {}".format(
type,
a_name,
op_attrs[a_name],
Expand Down Expand Up @@ -3757,8 +3749,7 @@ def check_if_to_static_diff_with_dygraph(op_type, inplace_map, outputs):
and inplace_map.get("Input", None) == "Out"
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
% (op_type, k)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(op_type, k)
)
elif isinstance(v, list):
for var in v:
Expand All @@ -3768,8 +3759,7 @@ def check_if_to_static_diff_with_dygraph(op_type, inplace_map, outputs):
and inplace_map.get("Input", None) == "Out"
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
% (op_type, k)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(op_type, k)
)


Expand Down Expand Up @@ -7361,7 +7351,7 @@ def to_string(self, throw_on_error, with_details=False):
"need_clip",
)
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
res_str += "{}: {}\n".format(attr_name, getattr(self, attr_name))
else:
res_str = Variable.to_string(self, throw_on_error, False)
return res_str
Expand Down
3 changes: 1 addition & 2 deletions python/paddle/base/layers/layer_function_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,8 +336,7 @@ def func(x, name=None):
and x.is_view_var
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call %s = %s.assign().'
% (inplace_op_type, x.name, x.name, x.nameb)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call {} = {}.assign().'.format(inplace_op_type, x.name, x.name, x.nameb)
)
return generate_activation_fn(origin_op_type)(x, name)

Expand Down
5 changes: 2 additions & 3 deletions python/paddle/base/layers/math_op_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -553,10 +553,9 @@ def __impl__(self, other_var):
file_name = stack[1]
line_num = stack[2]
warnings.warn(
"%s:%s\nThe behavior of expression %s has been unified with %s(X, Y, axis=-1) from Paddle 2.0. "
"{}:{}\nThe behavior of expression {} has been unified with {}(X, Y, axis=-1) from Paddle 2.0. "
"If your code works well in the older versions but crashes in this version, try to use "
"%s(X, Y, axis=0) instead of %s. This transitional warning will be dropped in the future."
% (
"{}(X, Y, axis=0) instead of {}. This transitional warning will be dropped in the future.".format(
file_name,
line_num,
EXPRESSION_MAP[method_name],
Expand Down
8 changes: 4 additions & 4 deletions python/paddle/base/trainer_desc.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ def _set_copy_table_config(self, config_dict):
if len(src_sparse_tables) != len(dest_sparse_tables):
raise ValueError(
"len(src_sparse_tables) != len(dest_sparse_tables),"
" %s vs %s" % (len(src_sparse_tables), len(dest_sparse_tables))
" {} vs {}".format(len(src_sparse_tables), len(dest_sparse_tables))
)
for i in src_sparse_tables:
config.src_sparse_tables.append(i)
Expand All @@ -260,7 +260,7 @@ def _set_copy_table_config(self, config_dict):
if len(src_dense_tables) != len(dest_dense_tables):
raise ValueError(
"len(src_dense_tables) != len(dest_dense_tables),"
" %s vs %s" % (len(src_dense_tables), len(dest_dense_tables))
" {} vs {}".format(len(src_dense_tables), len(dest_dense_tables))
)
for i in src_dense_tables:
config.src_dense_tables.append(i)
Expand All @@ -277,8 +277,8 @@ def _set_copy_table_config(self, config_dict):
dest_var_list = [dest_var_list]
if len(src_var_list) != len(dest_var_list):
raise ValueError(
"len(src_var_list) != len(dest_var_list), %s vs"
" %s" % (len(src_var_list), len(dest_var_list))
"len(src_var_list) != len(dest_var_list), {} vs"
" {}".format(len(src_var_list), len(dest_var_list))
)
for i in src_var_list:
config.src_var_list.append(i)
Expand Down