Commit

[CodeStyle][task 7] enable Ruff UP031 rule in python/paddle/base (PaddlePaddle#57665)

* delete UP031

* [CodeStyle][task 7] enable Ruff UP031 rule in python/paddle/base

* modified:   python/paddle/base/backward.py
	modified:   python/paddle/base/data_feeder.py
	modified:   python/paddle/base/dygraph/base.py
	modified:   python/paddle/base/executor.py
	modified:   python/paddle/base/framework.py
	modified:   python/paddle/base/layers/layer_function_generator.py
Kaedeharai authored and Frida-a committed Oct 14, 2023
1 parent 969cb31 commit 7049e94
Showing 10 changed files with 73 additions and 71 deletions.
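
For context, Ruff's UP031 rule (printf-string-formatting) flags printf-style % interpolation and asks for str.format() or f-strings instead; removing "UP031" from the per-file ignore list in pyproject.toml below is what turns the rule on for python/paddle/base. A minimal before/after sketch of the rewrite pattern applied throughout this diff (names and values are illustrative, not taken from the changed files):

    # Illustrative values only; not from the diff.
    input_name, op_name, expected_type = "x", "matmul", "float32"

    # Flagged by UP031: printf-style % interpolation.
    message = "The type of '%s' in %s must be %s." % (input_name, op_name, expected_type)

    # Accepted replacements: str.format() or an f-string (either silences UP031).
    message = "The type of '{}' in {} must be {}.".format(input_name, op_name, expected_type)
    message = f"The type of '{input_name}' in {op_name} must be {expected_type}."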
1 change: 0 additions & 1 deletion pyproject.toml
@@ -105,7 +105,6 @@ ignore = [

# Temporarily ignored
"python/paddle/base/**" = [
"UP031",
"C408",
"UP030",
"C405",
7 changes: 4 additions & 3 deletions python/paddle/base/backward.py
@@ -231,7 +231,7 @@ def modify_forward_desc_for_recompute(self):


def _pretty_op_desc_(op_desc, prefix):
out_s = "%s\tname:[%s]\n%s \tinputs:[%s]\n%s \toutputs:[%s]" % (
out_s = "{}\tname:[{}]\n{} \tinputs:[{}]\n{} \toutputs:[{}]".format(
prefix + "_op",
str(op_desc.type()),
prefix + "_input",
@@ -2441,8 +2441,9 @@ def calc_gradient_helper(
raise ValueError("all targets must be in the same block")
if target.shape != grad.shape:
raise ValueError(
"The shapes of target and grad are different: %s %s"
% (target.name, grad.name)
"The shapes of target and grad are different: {} {}".format(
target.name, grad.name
)
)
target_grad_map[_append_grad_suffix_(target.name)] = grad.name
input_grad_names_set.add(grad.name)
9 changes: 4 additions & 5 deletions python/paddle/base/core.py
@@ -47,11 +47,10 @@
if os.name == 'nt':
executable_path = os.path.abspath(os.path.dirname(sys.executable))
raise ImportError(
"""NOTE: You may need to run \"set PATH=%s;%%PATH%%\"
f"""NOTE: You may need to run \"set PATH={executable_path};%PATH%\"
if you encounters \"DLL load failed\" errors. If you have python
installed in other directory, replace \"%s\" with your own
directory. The original error is: \n %s"""
% (executable_path, executable_path, str(e))
installed in other directory, replace \"{executable_path}\" with your own
directory. The original error is: \n {str(e)}"""
)
else:
raise ImportError(
@@ -197,7 +196,7 @@ def run_shell_command(cmd):
def get_dso_path(core_so, dso_name):
if core_so and dso_name:
return run_shell_command(
"ldd %s|grep %s|awk '{print $3}'" % (core_so, dso_name)
f"ldd {core_so}|grep {dso_name}|awk '{{print $3}}'"
)
else:
return None
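
One detail worth noting from the core.py hunks above: literal percent signs and braces change spelling during the conversion. In %-formatting a literal % is written %%, and braces need no escaping; in an f-string a literal % stays as-is, while literal braces must be doubled. A hedged sketch of both cases with made-up values:

    core_so, dso_name = "libpaddle.so", "libcudnn.so"  # illustrative paths only

    # Old: %% is a literal percent, { } are plain characters.
    old_cmd = "ldd %s|grep %s|awk '{print $3}'" % (core_so, dso_name)
    # New: a literal % needs no escaping, but literal braces become {{ }}.
    new_cmd = f"ldd {core_so}|grep {dso_name}|awk '{{print $3}}'"
    assert old_cmd == new_cmd

    executable_path = r"C:\Python39"  # illustrative
    old_msg = "set PATH=%s;%%PATH%%" % executable_path
    new_msg = f"set PATH={executable_path};%PATH%"
    assert old_msg == new_msg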
18 changes: 10 additions & 8 deletions python/paddle/base/data_feeder.py
@@ -184,8 +184,9 @@ def check_type(input, input_name, expected_type, op_name, extra_message=''):
)
if not isinstance(input, expected_type):
raise TypeError(
"The type of '%s' in %s must be %s, but received %s. %s"
% (input_name, op_name, expected_type, type(input), extra_message)
"The type of '{}' in {} must be {}, but received {}. {}".format(
input_name, op_name, expected_type, type(input), extra_message
)
)


@@ -197,22 +198,23 @@ def check_dtype(
return
if convert_dtype(input_dtype) in ['float16']:
warnings.warn(
"The data type of '%s' in %s only support float16 in GPU now. %s"
% (input_name, op_name, extra_message)
"The data type of '{}' in {} only support float16 in GPU now. {}".format(
input_name, op_name, extra_message
)
)
if convert_dtype(input_dtype) in ['uint16'] and op_name not in [
'reshape',
'lookup_table',
'scale',
]:
warnings.warn(
"The data type of '%s' in %s only support bfloat16 in OneDNN now. %s"
% (input_name, op_name, extra_message)
"The data type of '{}' in {} only support bfloat16 in OneDNN now. {}".format(
input_name, op_name, extra_message
)
)
if convert_dtype(input_dtype) not in expected_dtype:
raise TypeError(
"The data type of '%s' in %s must be %s, but received %s. %s"
% (
"The data type of '{}' in {} must be {}, but received {}. {}".format(
input_name,
op_name,
expected_dtype,
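
Beyond style, one practical motivation for the rule, relevant to error-message helpers like the ones above: %-formatting misfires when its single argument happens to be a tuple, while str.format() and f-strings handle it fine. A small illustration with a made-up value:

    value = (1, 2)  # e.g. a shape accidentally passed as the only argument

    try:
        "received %s" % value  # TypeError: not all arguments converted
    except TypeError as exc:
        print(exc)

    print("received {}".format(value))  # received (1, 2)
    print(f"received {value}")          # received (1, 2)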
5 changes: 3 additions & 2 deletions python/paddle/base/dygraph/base.py
@@ -884,8 +884,9 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
)
if not isinstance(value, support_type):
raise TypeError(
"The type of 'value' in base.dygraph.to_variable must be %s, but received %s."
% (support_type, type(value))
"The type of 'value' in base.dygraph.to_variable must be {}, but received {}.".format(
support_type, type(value)
)
)
if isinstance(value, (core.eager.Tensor, framework.Variable)):
return value
16 changes: 9 additions & 7 deletions python/paddle/base/executor.py
@@ -256,8 +256,9 @@ def check_feed_shape_type(var, feed, num_places=1):
else feed._dtype()
)
raise ValueError(
'The data type of fed Variable %r must be %r, but received %r'
% (var.name, var_dtype_format, feed_dtype_format)
'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(
var.name, var_dtype_format, feed_dtype_format
)
)
return True

@@ -305,8 +306,9 @@ def pir_check_feed_shape_type(feed, name, target_shape, dtype, num_places=1):
else feed._dtype()
)
raise ValueError(
'The data type of fed Variable %r must be %r, but received %r'
% (name, var_dtype_format, feed_dtype_format)
'The data type of fed Variable {!r} must be {!r}, but received {!r}'.format(
name, var_dtype_format, feed_dtype_format
)
)
return True

@@ -487,7 +489,7 @@ def _add_feed_fetch_ops(
for i, var in enumerate(fetch_list):
assert isinstance(
var, (Variable, str)
), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
), f"Wrong type for fetch_list[{i}]: {type(var)}"
global_block.append_op(
type=fetch_op,
inputs={'X': [var]},
@@ -510,7 +512,7 @@ def _add_pir_fetch_ops(program, fetch_list, fetch_var_name):
for i, fetch_input in enumerate(fetch_list):
assert isinstance(
fetch_input, OpResult
), "Wrong type for fetch_list[%s]: %s" % (i, type(fetch_input))
), f"Wrong type for fetch_list[{i}]: {type(fetch_input)}"
paddle._pir_ops.fetch(fetch_input, fetch_var_name + str(i), i)


@@ -2792,7 +2794,7 @@ def _add_fetch_ops(
for i, var in enumerate(fetch_list):
assert isinstance(
var, (Variable, str)
), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
), f"Wrong type for fetch_list[{i}]: {type(var)}"
global_block.append_op(
type=fetch_op,
inputs={'X': [var]},
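
The executor.py changes also show how repr-style placeholders carry over: %r maps to the !r conversion flag, so the quoted repr output in the error message is preserved. A hedged sketch with placeholder values:

    name, expected, received = "image", "float32", "int64"  # illustrative only

    old = "The data type of fed Variable %r must be %r, but received %r" % (name, expected, received)
    new = "The data type of fed Variable {!r} must be {!r}, but received {!r}".format(name, expected, received)
    fstr = f"The data type of fed Variable {name!r} must be {expected!r}, but received {received!r}"
    assert old == new == fstr  # all three render e.g. 'image' with repr quotes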
70 changes: 34 additions & 36 deletions python/paddle/base/framework.py
@@ -521,17 +521,19 @@ def version_cmp(ver_a, ver_b):
if version_cmp(version_installed, zero_version) == 0:
if max_version is not None:
warnings.warn(
"PaddlePaddle version in [%s, %s] required, but %s installed. "
"PaddlePaddle version in [{}, {}] required, but {} installed. "
"Maybe you are using a develop version, "
"please make sure the version is good with your code."
% (min_version, max_version, fluid_version.full_version)
"please make sure the version is good with your code.".format(
min_version, max_version, fluid_version.full_version
)
)
else:
warnings.warn(
"PaddlePaddle version %s or higher is required, but %s installed, "
"PaddlePaddle version {} or higher is required, but {} installed, "
"Maybe you are using a develop version, "
"please make sure the version is good with your code."
% (min_version, fluid_version.full_version)
"please make sure the version is good with your code.".format(
min_version, fluid_version.full_version
)
)
return

@@ -551,15 +553,17 @@ def version_cmp(ver_a, ver_b):
or version_cmp(version_installed, min_version_to_check) < 0
):
raise Exception(
"VersionError: PaddlePaddle version in [%s, %s] required, but %s installed."
% (min_version, max_version, fluid_version.full_version)
"VersionError: PaddlePaddle version in [{}, {}] required, but {} installed.".format(
min_version, max_version, fluid_version.full_version
)
)
else:
if version_cmp(version_installed, min_version_to_check) < 0:
raise Exception(
"VersionError: PaddlePaddle version %s or higher is required, but %s installed, "
"please upgrade your PaddlePaddle to %s or other higher version."
% (min_version, fluid_version.full_version, min_version)
"VersionError: PaddlePaddle version {} or higher is required, but {} installed, "
"please upgrade your PaddlePaddle to {} or other higher version.".format(
min_version, fluid_version.full_version, min_version
)
)


@@ -623,11 +627,12 @@ def _set_pipeline_stage(stage):
def _fake_interface_only_(func):
def __impl__(*args, **kwargs):
raise AssertionError(
"'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
"'{}' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
" 1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
" 2. If you are using `@paddle.jit.to_static`, you can call `paddle.jit.enable_to_static(False)`. "
"If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
% (func.__name__, func.__name__)
"If you have to translate dynamic graph to static graph, please use other API to replace '{}'.".format(
func.__name__, func.__name__
)
)

return __impl__
@@ -1882,7 +1887,7 @@ def to_string(self, throw_on_error, with_details=False):
if with_details:
additional_attr = ("error_clip",)
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
res_str += f"{attr_name}: {getattr(self, attr_name)}\n"

return res_str

@@ -3055,20 +3060,14 @@ def find_name(var_list, name):
or m.intermediate
):
raise ValueError(
(
"Incorrect setting for output(s) of "
"operator \"%s\", should set: [%s]."
)
% (type, m.name)
"Incorrect setting for output(s) of "
f"operator \"{type}\", should set: [{m.name}]."
)
else:
if not ((m.name in outputs) or m.dispensable):
raise ValueError(
(
"Incorrect setting for output(s) of "
"operator \"%s\", should set: [%s]."
)
% (type, m.name)
"Incorrect setting for output(s) of "
f"operator \"{type}\", should set: [{m.name}]."
)

for out_proto in proto.outputs:
@@ -3110,9 +3109,7 @@ def find_name(var_list, name):
self._update_desc_attr(attr_name, attr_val)
for attr_name in extra_attrs_map.keys():
if os.environ.get('FLAGS_print_extra_attrs', '0') == '1':
warnings.warn(
"op %s use extra_attr: %s" % (type, attr_name)
)
warnings.warn(f"op {type} use extra_attr: {attr_name}")

if (attr_name not in op_attrs) or (
op_attrs[attr_name] is None
Expand All @@ -3129,7 +3126,7 @@ def find_name(var_list, name):
for attr in attrs:
if attr in op_attrs.keys():
warnings.warn(
"op %s use extra_attr: %s" % (type, attr)
f"op {type} use extra_attr: {attr}"
)

if type in special_op_attrs:
@@ -3142,8 +3139,7 @@ def find_name(var_list, name):
and default_value != op_attrs[a_name]
):
warnings.warn(
"op %s's attr %s = %s is not the default value: %s"
% (
"op {}'s attr {} = {} is not the default value: {}".format(
type,
a_name,
op_attrs[a_name],
@@ -3718,8 +3714,9 @@ def check_if_to_static_diff_with_dygraph(op_type, inplace_map, outputs):
and inplace_map.get("Input", None) == "Out"
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
% (op_type, k)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(
op_type, k
)
)
elif isinstance(v, list):
for var in v:
@@ -3729,8 +3726,9 @@ def check_if_to_static_diff_with_dygraph(op_type, inplace_map, outputs):
and inplace_map.get("Input", None) == "Out"
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'
% (op_type, k)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. If you are sure it is safe, you can call with paddle.base.framework._stride_in_no_check_dy2st_diff() in your safe code block.'.format(
op_type, k
)
)


@@ -7309,7 +7307,7 @@ def to_string(self, throw_on_error, with_details=False):
"need_clip",
)
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name, getattr(self, attr_name))
res_str += f"{attr_name}: {getattr(self, attr_name)}\n"
else:
res_str = Variable.to_string(self, throw_on_error, False)
return res_str
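
A pattern that recurs in the framework.py hunks: the message is built from several adjacent string literals (implicit concatenation), and the trailing .format(...) applies to the whole concatenated literal, just as the old % (...) did, because the literals are joined before the method call is evaluated. A minimal sketch with made-up version strings:

    min_version, max_version, installed = "2.4.0", "2.6.0", "0.0.0"  # illustrative

    warning = (
        "PaddlePaddle version in [{}, {}] required, but {} installed. "
        "Maybe you are using a develop version, "
        "please make sure the version is good with your code.".format(
            min_version, max_version, installed
        )
    )
    print(warning)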
5 changes: 3 additions & 2 deletions python/paddle/base/layers/layer_function_generator.py
@@ -329,8 +329,9 @@ def func(x, name=None):
and x.is_view_var
):
raise ValueError(
'Sorry about what\'s happend. In to_static mode, %s\'s output variable %s is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call %s = %s.assign().'
% (inplace_op_type, x.name, x.name, x.nameb)
'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call {} = {}.assign().'.format(
inplace_op_type, x.name, x.name, x.nameb
)
)
return generate_activation_fn(origin_op_type)(x, name)

5 changes: 2 additions & 3 deletions python/paddle/base/layers/math_op_patch.py
@@ -548,10 +548,9 @@ def __impl__(self, other_var):
file_name = stack[1]
line_num = stack[2]
warnings.warn(
"%s:%s\nThe behavior of expression %s has been unified with %s(X, Y, axis=-1) from Paddle 2.0. "
"{}:{}\nThe behavior of expression {} has been unified with {}(X, Y, axis=-1) from Paddle 2.0. "
"If your code works well in the older versions but crashes in this version, try to use "
"%s(X, Y, axis=0) instead of %s. This transitional warning will be dropped in the future."
% (
"{}(X, Y, axis=0) instead of {}. This transitional warning will be dropped in the future.".format(
file_name,
line_num,
EXPRESSION_MAP[method_name],
8 changes: 4 additions & 4 deletions python/paddle/base/trainer_desc.py
@@ -237,7 +237,7 @@ def _set_copy_table_config(self, config_dict):
if len(src_sparse_tables) != len(dest_sparse_tables):
raise ValueError(
"len(src_sparse_tables) != len(dest_sparse_tables),"
" %s vs %s" % (len(src_sparse_tables), len(dest_sparse_tables))
f" {len(src_sparse_tables)} vs {len(dest_sparse_tables)}"
)
for i in src_sparse_tables:
config.src_sparse_tables.append(i)
@@ -253,7 +253,7 @@ def _set_copy_table_config(self, config_dict):
if len(src_dense_tables) != len(dest_dense_tables):
raise ValueError(
"len(src_dense_tables) != len(dest_dense_tables),"
" %s vs %s" % (len(src_dense_tables), len(dest_dense_tables))
f" {len(src_dense_tables)} vs {len(dest_dense_tables)}"
)
for i in src_dense_tables:
config.src_dense_tables.append(i)
@@ -270,8 +270,8 @@ def _set_copy_table_config(self, config_dict):
dest_var_list = [dest_var_list]
if len(src_var_list) != len(dest_var_list):
raise ValueError(
"len(src_var_list) != len(dest_var_list), %s vs"
" %s" % (len(src_var_list), len(dest_var_list))
f"len(src_var_list) != len(dest_var_list), {len(src_var_list)} vs"
f" {len(dest_var_list)}"
)
for i in src_var_list:
config.src_var_list.append(i)
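
The trainer_desc.py hunks use the same idea in the other direction: a long message is split across adjacent literals to respect line length, and only the pieces carrying the f prefix are interpolated (in the example below both halves carry it). A small hedged example:

    src_var_list, dest_var_list = ["a", "b"], ["c"]  # illustrative

    msg = (
        f"len(src_var_list) != len(dest_var_list), {len(src_var_list)} vs"
        f" {len(dest_var_list)}"
    )
    print(msg)  # len(src_var_list) != len(dest_var_list), 2 vs 1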
