[CodeStyle][task 35-37] enable Flake8 E712, E266, E714 rule in python/paddle/base #58319

Merged (4 commits) on Nov 3, 2023
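For context, the three rules enabled by this PR check:

- E712: comparison to True / False should use identity (`is`, `is not`) or implicit truthiness rather than `==` / `!=`.
- E266: too many leading `#` for a block comment; block comments start with a single `#`.
- E714: a negated identity test should be spelled `x is not y`, not `not x is y`.

A minimal, hypothetical illustration (not code from the Paddle tree); the "flagged" lines show the patterns this PR rewrites:

    flag = True
    value = None

    # E712: compare against True/False by identity, not equality.
    if flag == True:       # flagged by E712
        pass
    if flag is True:       # accepted
        pass

    # E714: negate an identity test with `is not`.
    if not value is None:  # flagged by E714
        pass
    if value is not None:  # accepted
        pass

    ## E266 flags this doubled-'#' block comment.
    # A single '#' followed by a space is the accepted form.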
6 changes: 0 additions & 6 deletions .flake8
@@ -28,9 +28,3 @@ per-file-ignores =
 
     # Ignore compare with True in sot unittest
     test/sot/test_dup_top.py:E712
-
-    # temp ignore base directory
-    python/paddle/base/*:
-        E712,
-        E266,
-        E714
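Deleting this per-file-ignores entry is what actually turns the rules on: flake8 had been suppressing E712, E266, and E714 under python/paddle/base, and the hunks below fix the remaining violations so the temporary suppression can go. The test/sot/test_dup_top.py:E712 entry stays because that test compares with True on purpose.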
2 changes: 1 addition & 1 deletion python/paddle/base/backward.py
@@ -2348,7 +2348,7 @@ def _find_op_path_(
     # If block is while block, dealing with op specifically again.
     # TODO(liym27): Consider special types of ops.
     for i, op in reversed(list(enumerate(block.ops))):
-        if relevant_op_flags[i] == False and _some_in_set_(
+        if relevant_op_flags[i] is False and _some_in_set_(
             op.desc.output_arg_names(), output_names
         ):
             relevant_op_flags[i] = True
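One subtlety with the E712 rewrites in this PR: `x == False` and `x is False` agree only when x is an actual bool. For other falsy values they can differ, so the mechanical fix is safe in this hunk only because relevant_op_flags holds booleans (it is assigned True just below). A quick sketch:

    x = 0
    print(x == False)  # True: bool is a subclass of int, so 0 == False
    print(x is False)  # False: 0 and False are distinct objects
    y = []
    print(y == False)  # False: a list never equals a bool
    print(not y)       # True: implicit truthiness, often the most idiomatic form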
4 changes: 2 additions & 2 deletions python/paddle/base/device_worker.py
@@ -450,7 +450,7 @@ def _gen_worker_desc(self, trainer_desc):
             if (
                 opt_info["use_cvm"]
                 or "no_cvm" in opt_info
-                and opt_info["no_cvm"] == True
+                and opt_info["no_cvm"] is True
             ):
                 sparse_table.emb_dim = self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                     i
@@ -560,7 +560,7 @@ def _gen_worker_desc(self, trainer_desc):
             if (
                 opt_info["use_cvm"]
                 or "no_cvm" in opt_info
-                and opt_info["no_cvm"] == True
+                and opt_info["no_cvm"] is True
             ):
                 sparse_table.emb_dim = self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                     i
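Worth noting while reading these two hunks: Python's `and` binds tighter than `or`, so the condition groups as use_cvm or ("no_cvm" in opt_info and opt_info["no_cvm"] is True), and the E712 fix does not change that grouping. A sketch with a hypothetical opt_info:

    # `and` binds tighter than `or`, so this parses as
    # use_cvm or ("no_cvm" in opt_info and opt_info["no_cvm"] is True).
    opt_info = {"use_cvm": False, "no_cvm": True}
    cond = (
        opt_info["use_cvm"]
        or "no_cvm" in opt_info
        and opt_info["no_cvm"] is True
    )
    print(cond)  # True: the `and` branch fires even though use_cvm is False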
2 changes: 1 addition & 1 deletion python/paddle/base/dygraph/base.py
@@ -928,7 +928,7 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
     # (2): when used in flask framework, it may result in hang.
     # Details: https://github.com/PaddlePaddle/Paddle/issues/26635
     # So, we temporally diable the zero_copy strategy.
-    if zero_copy == True:
+    if zero_copy is True:
         warnings.warn(
             "Currently, zero_copy is not supported, and it will be discarded."
         )
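Identity comparison is a good fit in this case: zero_copy defaults to None, so the code needs to distinguish an explicit True from any merely truthy argument, and `zero_copy is True` does exactly that while also satisfying E712.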
16 changes: 8 additions & 8 deletions python/paddle/base/executor.py
@@ -1711,7 +1711,7 @@ def _run_impl(
         if isinstance(program, Program) and program._heter_pipeline_opt:
             # print("program._heter_pipeline_opt: {}".format(
             #     program._heter_pipeline_opt))
-            ## change default executor
+            # change default executor
             heter_place = program._heter_pipeline_opt["heter_place"]
             heter_place = framework._get_paddle_place(heter_place)
             p = core.Place()
@@ -1868,12 +1868,12 @@ def _run_impl(
                 varobj = global_block.vars[varname]
 
                 if (
-                    vardesc.persistable() == False
+                    vardesc.persistable() is False
                     and vardesc.type() == core.VarDesc.VarType.LOD_TENSOR
-                    and vardesc.need_check_feed() == True
-                    and varobj.stop_gradient == True
-                    and varobj.is_data == True
-                    and varobj.belong_to_optimizer == False
+                    and vardesc.need_check_feed() is True
+                    and varobj.stop_gradient is True
+                    and varobj.is_data is True
+                    and varobj.belong_to_optimizer is False
                     and varname not in feed
                 ):
                     raise ValueError('Need feed data for variable %s' % varname)
@@ -2159,7 +2159,7 @@ def _prepare_trainer(
     ):
         is_heter = 0
         use_ps_gpu = 0
-        if not program._fleet_opt is None:
+        if program._fleet_opt is not None:
             if program._fleet_opt.get("worker_class", "") == "HeterCpuWorker":
                 is_heter = 1
             if program._fleet_opt.get("trainer", "") == "HeterXpuTrainer":
@@ -2285,7 +2285,7 @@ def _run_from_dataset(
                 raise RuntimeError(
                     "dataset is need and should be initialized"
                 )
-            ## change default executor
+            # change default executor
             heter_place = framework._get_paddle_place(heter_place)
             p = core.Place()
             p.set_place(heter_place)
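The _prepare_trainer hunk is this file's one E714 fix: not program._fleet_opt is None parses as not (program._fleet_opt is None), which is logically equivalent to the replacement but reads as if it negated the operand. A minimal sketch with a hypothetical stand-in value:

    fleet_opt = {"trainer": "HeterXpuTrainer"}  # hypothetical stand-in

    # E714: a negated identity test should use `is not`.
    if not fleet_opt is None:   # flagged; parses as not (fleet_opt is None)
        print("runs")
    if fleet_opt is not None:   # accepted; same truth table, clearer intent
        print("also runs")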
6 changes: 3 additions & 3 deletions python/paddle/base/framework.py
@@ -2995,7 +2995,7 @@ def __init__(
             if (
                 type == 'less_than'
                 and op_attrs['force_cpu'] is not None
-            ) or op_attrs['force_cpu'] != False:
+            ) or op_attrs['force_cpu'] is not False:
                 warnings.warn(
                     "The Attr(force_cpu) of Op(%s) will be deprecated in the future, "
                     "please use 'device_guard' instead. 'device_guard' has higher priority when they are "
@@ -4266,7 +4266,7 @@ def _rename_var(self, name, new_name):
         return var
 
     def _remove_var(self, name, sync=True):
-        if sync == True:
+        if sync is True:
             self._sync_with_cpp()
         self.desc._remove_var(name.encode())
         del self.vars[name]
@@ -4455,7 +4455,7 @@ def _remove_op(self, index, sync=True):
         Returns:
             None
         """
-        if sync == True:
+        if sync is True:
             self._sync_with_cpp()
         self.desc._remove_op(index, index + 1)
         del self.ops[index]
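One asymmetry to note in the first hunk: op_attrs['force_cpu'] != False becomes is not False, and the two agree when force_cpu is None or a bool (None is not False, matching None != False). They would diverge only for a falsy non-bool such as 0, which this attribute is presumably never given, so the rewrite is behavior-preserving under that assumption.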
2 changes: 1 addition & 1 deletion python/paddle/base/trainer_desc.py
@@ -112,7 +112,7 @@ def _set_infer(self, infer):
 
     def _set_fleet_desc(self, fleet_desc):
         self._fleet_desc = fleet_desc
-        ## serialize fleet_desc
+        # serialize fleet_desc
         from google.protobuf import text_format
 
         fleet_desc_str = text_format.MessageToString(fleet_desc)
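E266 is purely cosmetic: it reports "too many leading '#' for block comment". The before/after shape used throughout this PR, using the comment from the hunk above:

    ## serialize fleet_desc    <- flagged by E266 (doubled '#')
    # serialize fleet_desc     <- accepted (single '#', then a space)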
2 changes: 1 addition & 1 deletion python/paddle/base/trainer_factory.py
@@ -186,7 +186,7 @@ def handler_launch_func(self, scope, handler):
         elapsed_secs = 0
         while True:
             self.running_lock.acquire()
-            if self.running == False:
+            if self.running is False:
                 break
             if elapsed_secs < period_secs:
                 # TODO(guru4elephant): needs customized condition
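With every violation fixed, running flake8 with only these rules selected, e.g. flake8 --select=E712,E266,E714 python/paddle/base, should now come back clean, which is what allows the temporary per-file-ignores entry in .flake8 to be dropped.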