[Don't merge][CodeStyle][pyupgrade] automatically rewrite code with pyupgrade #48140

Closed
wants to merge 4 commits
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -82,6 +82,11 @@ repos:
entry: bash ./tools/codestyle/pylint_pre_commit.hook
language: system
files: \.(py)$
- repo: https://github.com/asottile/pyupgrade
rev: v3.2.2
hooks:
- id: pyupgrade
args: ["--py37-plus"]
# For C++ files
- repo: local
hooks:
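The hook added above runs `pyupgrade --py37-plus` over staged `*.py` files on every commit. As a rough sketch (not part of this PR's diff), the same rewrite could be reproduced locally with `pre-commit run pyupgrade --all-files`, or by invoking the tool directly:

```python
# Illustrative helper only; assumes the `pyupgrade` console script used by the
# hook above is installed. It rewrites the given files in place, as the
# pre-commit hook does for staged files.
import subprocess
import sys


def run_pyupgrade(paths):
    """Apply pyupgrade --py37-plus to the given Python files in place."""
    return subprocess.run(
        ["pyupgrade", "--py37-plus", *paths], check=False
    ).returncode


if __name__ == "__main__":
    # File paths are passed on the command line, e.g. cmake/copyfile.py.
    sys.exit(run_pyupgrade(sys.argv[1:]))
```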
6 changes: 3 additions & 3 deletions cmake/copyfile.py
@@ -26,18 +26,18 @@ def main():
dst = os.path.join(dst, pathList[-1])
if not os.path.exists(dst):
shutil.copytree(src, dst)
print("first copy directory: {0} --->>> {1}".format(src, dst))
print(f"first copy directory: {src} --->>> {dst}")
else:
shutil.rmtree(dst)
shutil.copytree(src, dst)
print("overwritten copy directory: {0} --->>> {1}".format(src, dst))
print(f"overwritten copy directory: {src} --->>> {dst}")
else: # copy file, wildcard
if not os.path.exists(dst):
os.makedirs(dst)
srcFiles = glob.glob(src)
for srcFile in srcFiles:
shutil.copy(srcFile, dst)
print("copy file: {0} --->>> {1}".format(srcFile, dst))
print(f"copy file: {srcFile} --->>> {dst}")


if __name__ == "__main__":
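A small self-contained check (values are illustrative, not from the Paddle tree) showing that the f-string rewrites above are output-preserving for these positional `str.format()` calls:

```python
# Illustrative only: the rewritten f-strings print exactly what the old
# positional str.format() calls printed.
src, dst = "third_party/lib", "build/lib"

old = "first copy directory: {0} --->>> {1}".format(src, dst)
new = f"first copy directory: {src} --->>> {dst}"
assert old == new

old = "copy file: {0} --->>> {1}".format(src, dst)
new = f"copy file: {src} --->>> {dst}"
assert old == new
print("f-string rewrites are equivalent")
```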
90 changes: 44 additions & 46 deletions paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
@@ -18,49 +18,47 @@
####################
# Global Variables #
####################
ops_to_fill_zero_for_empty_grads = set(
[
"split_grad",
"split_with_num_grad",
"rnn_grad",
"matmul_double_grad",
"matmul_triple_grad",
"sigmoid_double_grad",
"sigmoid_triple_grad",
"add_double_grad",
"add_triple_grad",
"multiply_grad",
"multiply_double_grad",
"multiply_triple_grad",
"conv2d_grad_grad",
"batch_norm_double_grad",
"tanh_double_grad",
"tanh_triple_grad",
"sin_double_grad",
"sin_triple_grad",
"cos_double_grad",
"cos_triple_grad",
"subtract_double_grad",
"divide_double_grad",
"log_double_grad",
"elu_double_grad",
"leaky_relu_double_grad",
"sqrt_double_grad",
"rsqrt_double_grad",
"square_double_grad",
"celu_double_grad",
"pad_double_grad",
"pad3d_double_grad",
"squeeze_double_grad",
"unsqueeze_double_grad",
"instance_norm_double_grad",
"conv3d_double_grad",
"depthwise_conv2d_grad_grad",
"concat_double_grad",
"expand_grad",
"argsort_grad",
]
)
ops_to_fill_zero_for_empty_grads = {
"split_grad",
"split_with_num_grad",
"rnn_grad",
"matmul_double_grad",
"matmul_triple_grad",
"sigmoid_double_grad",
"sigmoid_triple_grad",
"add_double_grad",
"add_triple_grad",
"multiply_grad",
"multiply_double_grad",
"multiply_triple_grad",
"conv2d_grad_grad",
"batch_norm_double_grad",
"tanh_double_grad",
"tanh_triple_grad",
"sin_double_grad",
"sin_triple_grad",
"cos_double_grad",
"cos_triple_grad",
"subtract_double_grad",
"divide_double_grad",
"log_double_grad",
"elu_double_grad",
"leaky_relu_double_grad",
"sqrt_double_grad",
"rsqrt_double_grad",
"square_double_grad",
"celu_double_grad",
"pad_double_grad",
"pad3d_double_grad",
"squeeze_double_grad",
"unsqueeze_double_grad",
"instance_norm_double_grad",
"conv3d_double_grad",
"depthwise_conv2d_grad_grad",
"concat_double_grad",
"expand_grad",
"argsort_grad",
}

# For API dispatch used at python-level
# { op_name : [arg_name, ...] }
@@ -105,15 +103,15 @@ def AssertMessage(lhs_str, rhs_str):


def ReadFwdFile(filepath):
f = open(filepath, 'r')
f = open(filepath)
# empty file loaded by yaml is None
contents = yaml.load(f, Loader=yaml.FullLoader)
f.close()
return contents if contents is not None else []


def ReadBwdFile(filepath):
f = open(filepath, 'r')
f = open(filepath)
contents = yaml.load(f, Loader=yaml.FullLoader)
ret = {}
if contents is not None:
Expand Down Expand Up @@ -520,7 +518,7 @@ def DetermineForwardPositionMap(
if len(forward_returns_list) == 1:
return_name = "out"
else:
return_name = "out_{}".format(i + 1)
return_name = f"out_{i + 1}"
else:
return_name = forward_return[0]
return_type = forward_return[1]
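Two patterns are rewritten in this file: `set([...])` becomes a set literal, and the redundant `'r'` mode is dropped from `open()`. A minimal sketch (illustrative names only) of why both are behavior-preserving:

```python
# Illustrative only, not part of this PR's diff.
import inspect

# A set literal builds the same object as set() over a list literal,
# without constructing the intermediate list.
assert {"split_grad", "rnn_grad"} == set(["split_grad", "rnn_grad"])

# "r" is already the default mode of open(), so open(path) behaves like
# open(path, "r"); pyupgrade simply drops the redundant argument.
assert inspect.signature(open).parameters["mode"].default == "r"
```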
@@ -42,7 +42,7 @@
# But because there is no check in old dygraph mode, in order to
# keep the code compatible, here we also skip the inplace check in new dygraph temporarily,
# and this will be fixed in the future.
inplace_check_blacklist = set(["assign_out_"])
inplace_check_blacklist = {"assign_out_"}

# Black Ops list that's NO NEED to apply code generation
black_ops_list = [
@@ -21,7 +21,7 @@
#########################
# Global Configurations #
#########################
skipped_forward_api_names = set([])
skipped_forward_api_names = set()


def SkipAPIGeneration(forward_api_name):
@@ -85,7 +85,7 @@ def download_concat(cache_folder, zip_path):
download(data_urls[i], cache_folder, data_md5s[i])
file_name = os.path.join(cache_folder, data_urls[i].split('/')[-1])
file_names.append(file_name)
print("Downloaded part {0}\n".format(file_name))
print(f"Downloaded part {file_name}\n")
with open(zip_path, "wb") as outfile:
for fname in file_names:
shutil.copyfileobj(open(fname, 'rb'), outfile)
@@ -172,13 +172,13 @@ def run_convert():
retry = retry + 1
else:
raise RuntimeError(
"Can not convert the dataset to binary file with try limit {0}".format(
"Can not convert the dataset to binary file with try limit {}".format(
try_limit
)
)
download_concat(cache_folder, zip_path)
convert_Imagenet_tar2bin(zip_path, output_file)
print("\nSuccess! The binary file can be found at {0}".format(output_file))
print(f"\nSuccess! The binary file can be found at {output_file}")


def convert_Imagenet_local2bin(args):
@@ -229,7 +229,7 @@ def convert_Imagenet_local2bin(args):
)
if os.path.getsize(bin_file_path) == target_size:
print(
"Success! The user data output binary file can be found at: {0}".format(
"Success! The user data output binary file can be found at: {}".format(
bin_file_path
)
)
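The rewrites in this file either drop the explicit positional index from `"{0}".format(...)` or convert the call to an f-string. A one-line check (illustrative value) that all three spellings render identically:

```python
# Illustrative only: with a single positional argument, "{0}", "{}" and an
# f-string all produce the same text.
bin_file_path = "/tmp/data.bin"
assert (
    "found at: {0}".format(bin_file_path)
    == "found at: {}".format(bin_file_path)
    == f"found at: {bin_file_path}"
)
```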
4 changes: 2 additions & 2 deletions paddle/fluid/operators/generator/cross_validate.py
@@ -23,10 +23,10 @@
def main(forward_op_yaml_paths, backward_op_yaml_paths):
ops = {}
for op_yaml_path in chain(forward_op_yaml_paths, backward_op_yaml_paths):
with open(op_yaml_path, "rt", encoding="utf-8") as f:
with open(op_yaml_path, encoding="utf-8") as f:
op_list = yaml.safe_load(f)
if op_list is not None:
ops.update(to_named_dict((op_list)))
ops.update(to_named_dict(op_list))

cross_validate(ops)

16 changes: 7 additions & 9 deletions paddle/fluid/operators/generator/filters.py
@@ -32,7 +32,7 @@


def quote(s):
return '"{}"'.format(s)
return f'"{s}"'


# ------------------------------ attr -------------------------------------
Expand Down Expand Up @@ -84,16 +84,16 @@ def to_sr_output_type(s):
# -------------- transform argument names from yaml to opmaker ------------
def to_opmaker_name(s):
if s.endswith("_grad"):
return 'GradVarName("{}")'.format(s[:-5])
return f'GradVarName("{s[:-5]}")'
else:
return '"{}"'.format(s)
return f'"{s}"'


def to_opmaker_name_cstr(s):
if s.endswith("_grad"):
return '"{}@GRAD"'.format(s[:-5])
return f'"{s[:-5]}@GRAD"'
else:
return '"{}"'.format(s)
return f'"{s}"'


def to_pascal_case(s):
@@ -122,11 +122,9 @@ def cartesian_prod_attrs(attrs):
type_name = attr["typename"]
name = attr["name"]
if type_name == "Scalar":
items.append((name, "{}Tensor".format(name)))
items.append((name, f"{name}Tensor"))
elif type_name == "IntArray":
items.append(
(name, "{}Tensor".format(name), "{}TensorList".format(name))
)
items.append((name, f"{name}Tensor", f"{name}TensorList"))
else:
items.append((name,))

12 changes: 6 additions & 6 deletions paddle/fluid/operators/generator/generate_op.py
@@ -307,23 +307,23 @@ def main(
output_op_path,
output_arg_map_path,
):
with open(ops_yaml_path, "rt") as f:
with open(ops_yaml_path) as f:
ops = yaml.safe_load(f)
ops = [restruct_io(op) for op in ops]
forward_op_dict = to_named_dict(ops)

with open(backward_yaml_path, "rt") as f:
with open(backward_yaml_path) as f:
backward_ops = yaml.safe_load(f)
backward_ops = [restruct_io(op) for op in backward_ops]
backward_op_dict = to_named_dict(backward_ops)

with open(op_version_yaml_path, "rt") as f:
with open(op_version_yaml_path) as f:
op_versions = yaml.safe_load(f)
# add op version info into op
for op_version in op_versions:
forward_op_dict[op_version['op']]['version'] = op_version['version']

with open(op_compat_yaml_path, "rt") as f:
with open(op_compat_yaml_path) as f:
op_op_map = yaml.safe_load(f)

for op in ops:
@@ -356,14 +356,14 @@ def main(
return

op_template = env.get_template('op.c.j2')
with open(output_op_path, "wt") as f:
with open(output_op_path, "w") as f:
msg = op_template.render(
ops=ops, backward_ops=backward_ops, op_dict=op_dict
)
f.write(msg)

ks_template = env.get_template('ks.c.j2')
with open(output_arg_map_path, 'wt') as f:
with open(output_arg_map_path, 'w') as f:
msg = ks_template.render(ops=ops, backward_ops=backward_ops)
f.write(msg)

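Here pyupgrade drops the explicit `t` (text) flag: `"rt"` is `open()`'s default mode and `"wt"` is equivalent to `"w"`. A minimal round-trip sketch (temporary file and content are illustrative):

```python
# Illustrative only: "wt"/"rt" spell out the default text mode, so the
# rewritten open(path, "w") / open(path) behave identically.
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.yaml")
with open(path, "wt") as f:      # old spelling
    f.write("- op: relu\n")
with open(path) as f:            # new spelling: text read mode by default
    assert f.read() == "- op: relu\n"
```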
8 changes: 4 additions & 4 deletions paddle/fluid/operators/generator/generate_sparse_op.py
@@ -71,12 +71,12 @@ def restruct_io(op):


def main(op_yaml_path, backward_yaml_path, output_op_path, output_arg_map_path):
with open(op_yaml_path, "rt") as f:
with open(op_yaml_path) as f:
ops = yaml.safe_load(f)
ops = [restruct_io(op) for op in ops]
forward_op_dict = to_named_dict(ops)

with open(backward_yaml_path, "rt") as f:
with open(backward_yaml_path) as f:
backward_ops = yaml.safe_load(f)
backward_ops = [restruct_io(op) for op in backward_ops]
backward_op_dict = to_named_dict(backward_ops)
@@ -124,14 +124,14 @@ def main(op_yaml_path, backward_yaml_path, output_op_path, output_arg_map_path):
return

op_template = env.get_template('sparse_op.c.j2')
with open(output_op_path, "wt") as f:
with open(output_op_path, "w") as f:
msg = op_template.render(
ops=ops, backward_ops=backward_ops, op_dict=op_dict
)
f.write(msg)

ks_template = env.get_template('sparse_ks.c.j2')
with open(output_arg_map_path, 'wt') as f:
with open(output_arg_map_path, 'w') as f:
msg = ks_template.render(ops=ops, backward_ops=backward_ops)
f.write(msg)

4 changes: 2 additions & 2 deletions paddle/fluid/operators/generator/parse_op.py
@@ -20,7 +20,7 @@


def main(op_yaml_path, output_path, backward):
with open(op_yaml_path, "rt") as f:
with open(op_yaml_path) as f:
ops = yaml.safe_load(f)
if ops is None:
ops = []
@@ -30,7 +30,7 @@ def main(op_yaml_path, output_path, backward):
for op in ops
]

with open(output_path, "wt") as f:
with open(output_path, "w") as f:
yaml.safe_dump(ops, f, default_flow_style=None, sort_keys=False)


4 changes: 2 additions & 2 deletions paddle/fluid/operators/generator/parse_utils.py
@@ -33,7 +33,7 @@ def parse_arg(op_name: str, s: str) -> Dict[str, str]:
1. typename name
2. typename name = default_value
"""
typename, rest = [item.strip() for item in s.split(" ", 1)]
typename, rest = (item.strip() for item in s.split(" ", 1))
assert (
len(typename) > 0
), f"The arg typename should not be empty. Please check the args of {op_name} in yaml."
@@ -42,7 +42,7 @@ def parse_arg(op_name: str, s: str) -> Dict[str, str]:
rest.count("=") <= 1
), f"There is more than 1 = in an arg in {op_name}"
if rest.count("=") == 1:
name, default_value = [item.strip() for item in rest.split("=", 1)]
name, default_value = (item.strip() for item in rest.split("=", 1))
assert (
len(name) > 0
), f"The arg name should not be empty. Please check the args of {op_name} in yaml."
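The change in `parse_arg` replaces a list comprehension with a generator expression inside tuple unpacking. A short sketch (illustrative input string) showing the unpacked values are unchanged while the intermediate list is no longer materialized:

```python
# Illustrative only: unpacking two items from a generator expression yields
# the same values as unpacking from the equivalent list comprehension.
s = "Tensor x = None"
typename, rest = (item.strip() for item in s.split(" ", 1))
assert [item.strip() for item in s.split(" ", 1)] == [typename, rest]
assert (typename, rest) == ("Tensor", "x = None")
```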
2 changes: 1 addition & 1 deletion paddle/infrt/tests/models/efficientnet-b4/net/utils.py
@@ -421,4 +421,4 @@ def load_pretrained_weights(
state_dict.pop('_fc.bias')
model.set_state_dict(state_dict)

print('Loaded pretrained weights for {}'.format(model_name))
print(f'Loaded pretrained weights for {model_name}')
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/api_gen.py
@@ -368,7 +368,7 @@ def generate_api(api_yaml_path, header_file_path, source_file_path):
apis = []

for each_api_yaml in api_yaml_path:
with open(each_api_yaml, 'r') as f:
with open(each_api_yaml) as f:
api_list = yaml.load(f, Loader=yaml.FullLoader)
if api_list:
apis.extend(api_list)
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/backward_api_gen.py
@@ -313,7 +313,7 @@ def generate_backward_api(

bw_apis = []
for each_api_yaml in backward_yaml_path:
with open(each_api_yaml, 'r') as f:
with open(each_api_yaml) as f:
api_list = yaml.load(f, Loader=yaml.FullLoader)
if api_list:
bw_apis.extend(api_list)