diff --git a/tools/diff_use_default_grad_op_maker.py b/tools/diff_use_default_grad_op_maker.py
index a8e82fe657fd14..51759b8ab0d773 100644
--- a/tools/diff_use_default_grad_op_maker.py
+++ b/tools/diff_use_default_grad_op_maker.py
@@ -30,7 +30,7 @@ def generate_spec(filename):
 
 def read_spec(filename):
     with open(filename, 'r') as f:
-        return {line.strip() for line in f.readlines()}
+        return {line.strip() for line in f}
 
 
 def get_spec_diff(dev_filename, pr_filename):
diff --git a/tools/externalError/spider.py b/tools/externalError/spider.py
index bf6af68ff64ad2..83fa8b6d511713 100644
--- a/tools/externalError/spider.py
+++ b/tools/externalError/spider.py
@@ -34,19 +34,19 @@ def parsing(externalErrorDesc):
     ssl._create_default_https_context = ssl._create_unverified_context
     html = urllib.request.urlopen(url).read().decode('utf-8')
-        list_p = re.findall(res_p, m_message, re.S | re.M)
-        list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
+        list_p = re.findall(res_p, m_message, re.DOTALL | re.MULTILINE)
+        list_p_detail = re.findall(
+            res_p_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_p) == len(list_p_detail)
         for idx in range(len(list_p)):
             m_message = m_message.replace(list_p[idx], list_p_detail[idx])
@@ -94,13 +98,13 @@ def parsing(externalErrorDesc):
     html = urllib.request.urlopen(url).read().decode('utf-8')
-        list_class = re.findall(res_class, m_message, re.S | re.M)
-        list_class_detail = re.findall(res_class_detail, m_message, re.S | re.M)
+        list_class = re.findall(res_class, m_message, re.DOTALL | re.MULTILINE)
+        list_class_detail = re.findall(
+            res_class_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_class) == len(list_class_detail)
         for idx in range(len(list_class)):
             m_message = m_message.replace(
@@ -163,24 +169,28 @@ def parsing(externalErrorDesc):
-        list_a = re.findall(res_a, m_message, re.S | re.M)
-        list_shape = re.findall(res_shape, m_message, re.S | re.M)
+        list_a = re.findall(res_a, m_message, re.DOTALL | re.MULTILINE)
+        list_shape = re.findall(res_shape, m_message, re.DOTALL | re.MULTILINE)
         assert len(list_a) == len(list_shape)
         for idx in range(len(list_a)):
             m_message = m_message.replace(list_a[idx], list_shape[idx])
-        list_span = re.findall(res_span, m_message, re.S | re.M)
-        list_span_detail = re.findall(res_span_detail, m_message, re.S | re.M)
+        list_span = re.findall(res_span, m_message, re.DOTALL | re.MULTILINE)
+        list_span_detail = re.findall(
+            res_span_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_span) == len(list_span_detail)
         for idx in range(len(list_span)):
             m_message = m_message.replace(list_span[idx], list_span_detail[idx])
-        list_samp = re.findall(res_samp, m_message, re.S | re.M)
-        list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
+        list_samp = re.findall(res_samp, m_message, re.DOTALL | re.MULTILINE)
+        list_samp_detail = re.findall(
+            res_samp_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_samp) == len(list_samp_detail)
         for idx in range(len(list_samp)):
             m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
@@ -216,10 +226,10 @@ def parsing(externalErrorDesc):
     html = urllib.request.urlopen(url).read().decode('utf-8')
-    m_dt = re.findall(res_dt, m_div, re.S | re.M)
+    m_dt = re.findall(res_dt, m_div, re.DOTALL | re.MULTILINE)
     for error in m_dt:
         m_message = error[1]
@@ -227,16 +237,20 @@ def parsing(externalErrorDesc):
-        list_p = re.findall(res_p, m_message, re.S | re.M)
-        list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
+        list_p = re.findall(res_p, m_message, re.DOTALL | re.MULTILINE)
+        list_p_detail = re.findall(
+            res_p_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_p) == len(list_p_detail)
         for idx in range(len(list_p)):
             m_message = m_message.replace(list_p[idx], list_p_detail[idx])
-        list_samp = re.findall(res_samp, m_message, re.S | re.M)
-        list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
+        list_samp = re.findall(res_samp, m_message, re.DOTALL | re.MULTILINE)
+        list_samp_detail = re.findall(
+            res_samp_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_samp) == len(list_samp_detail)
         for idx in range(len(list_samp)):
             m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
@@ -278,12 +292,12 @@ def parsing(externalErrorDesc):
     html = urllib.request.urlopen(url).read().decode('utf-8')
-        list_p = re.findall(res_p, m_message, re.S | re.M)
-        list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
+        list_p = re.findall(res_p, m_message, re.DOTALL | re.MULTILINE)
+        list_p_detail = re.findall(
+            res_p_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_p) == len(list_p_detail)
         for idx in range(len(list_p)):
             m_message = m_message.replace(list_p[idx], list_p_detail[idx])
-        list_samp = re.findall(res_samp, m_message, re.S | re.M)
-        list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
+        list_samp = re.findall(res_samp, m_message, re.DOTALL | re.MULTILINE)
+        list_samp_detail = re.findall(
+            res_samp_detail, m_message, re.DOTALL | re.MULTILINE
+        )
         assert len(list_samp) == len(list_samp_detail)
         for idx in range(len(list_samp)):
             m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
-        list_strong = re.findall(res_strong, m_message, re.S | re.M)
+        list_strong = re.findall(
+            res_strong, m_message, re.DOTALL | re.MULTILINE
+        )
         list_strong_detail = re.findall(
-            res_strong_detail, m_message, re.S | re.M
+            res_strong_detail, m_message, re.DOTALL | re.MULTILINE
         )
         assert len(list_strong) == len(list_strong_detail)
         for idx in range(len(list_strong)):
@@ -331,10 +351,10 @@ def parsing(externalErrorDesc):
     allMessageDesc.type = external_error_pb2.NCCL
     html = urllib.request.urlopen(url).read().decode('utf-8')
-    m_div = re.findall(res_div, html, re.S | re.M)[0]
+    m_div = re.findall(res_div, html, re.DOTALL | re.MULTILINE)[0]
-    m_dt = re.findall(res_dt, m_div, re.S | re.M)
+    m_dt = re.findall(res_dt, m_div, re.DOTALL | re.MULTILINE)
     for error in m_dt:
         m_message = re.sub(r'\n', '', error[2])
         _Messages = allMessageDesc.messages.add()
diff --git a/tools/gen_ut_cmakelists.py b/tools/gen_ut_cmakelists.py
index 9749fe320b4149..659d64cb52b873 100644
--- a/tools/gen_ut_cmakelists.py
+++ b/tools/gen_ut_cmakelists.py
@@ -333,7 +333,7 @@ def parse_assigned_dist_ut_ports(self, current_work_dir, depth=0):
)
) as csv_file:
found = False
- for line in csv_file.readlines():
+ for line in csv_file:
(
name,
_,
diff --git a/tools/get_single_test_cov.py b/tools/get_single_test_cov.py
index 3b5ed6fa8c69b4..c107ddb1c08e12 100644
--- a/tools/get_single_test_cov.py
+++ b/tools/get_single_test_cov.py
@@ -108,7 +108,7 @@ def analysisFNDAFile(rootPath, test):
matchObj = re.match(
r'(.*)Maker(.*)|(.*)Touch(.*)Regist(.*)|(.*)Touch(.*)JitKernel(.*)|(.*)converterC2Ev(.*)',
fn,
- re.I,
+ re.IGNORECASE,
)
if matchObj is None:
OP_REGIST = False
diff --git a/tools/get_ut_file_map.py b/tools/get_ut_file_map.py
index a7f00a67dc3706..9b7d8cce5ac9e8 100644
--- a/tools/get_ut_file_map.py
+++ b/tools/get_ut_file_map.py
@@ -66,7 +66,7 @@ def handle_ut_file_map(rootPath):
all_ut = f'{rootPath}/build/all_uts_paddle'
with open(all_ut, 'r') as f:
all_ut_list = []
- for ut in f.readlines():
+ for ut in f:
ut = ut.replace('\n', '')
all_ut_list.append(ut.strip())
f.close()
@@ -190,7 +190,7 @@ def ut_file_map_supplement(rootPath):
with open(all_uts_paddle, 'r') as f:
all_uts_paddle_list = []
- for ut in f.readlines():
+ for ut in f:
all_uts_paddle_list.append(ut.strip())
f.close()
@@ -204,7 +204,7 @@ def ut_file_map_supplement(rootPath):
prec_delta_new = f"{rootPath}/build/prec_delta"
with open(prec_delta_new, 'r') as f:
prec_delta_new_list = []
- for ut in f.readlines():
+ for ut in f:
prec_delta_new_list.append(ut.strip())
f.close()
prec_delta_new_list.append(
diff --git a/tools/parse_kernel_info.py b/tools/parse_kernel_info.py
index 89ea4e3ad44b3a..b653a6507ffe37 100644
--- a/tools/parse_kernel_info.py
+++ b/tools/parse_kernel_info.py
@@ -131,7 +131,7 @@ def parse_paddle_kernels(lib="phi", kernel_type="function", print_detail=False):
print(
f"{value.op_type.ljust(max_op_type_lengths + 4)} : {value.supported_dtypes}"
)
- print("")
+ print()
return stats
@@ -155,7 +155,7 @@ def main(lib):
print(f"phi function kernels : {phi_function_kernels_stats}")
print(f"phi structure kernels : {phi_structure_kernels_stats}")
print(f"phi all kernels : {phi_all_kernels_stats}")
- print("")
+ print()
else:
fluid_ops_stats = parse_paddle_kernels(lib, "fluid", print_detail=False)
phi_ops_stats = parse_paddle_kernels(lib, "phi", print_detail=False)
@@ -168,7 +168,7 @@ def main(lib):
print(f"fluid operators : {fluid_ops_stats}")
print(f"phi operators : {phi_ops_stats}")
print(f"all operators : {all_ops_stats}")
- print("")
+ print()
main(lib="fluid")
diff --git a/tools/sampcd_processor.py b/tools/sampcd_processor.py
index 5a87a35b9f6845..76757cb2434ab9 100644
--- a/tools/sampcd_processor.py
+++ b/tools/sampcd_processor.py
@@ -80,7 +80,7 @@ def _patch_tensor_place():
(.*?) # Place=(XXX)
(\,.*?\))
""",
- re.X | re.S,
+ re.VERBOSE | re.DOTALL,
)
_check_output = checker.check_output
@@ -128,7 +128,7 @@ def _patch_float_precision(digits):
)?
)
""",
- re.X | re.S,
+ re.VERBOSE | re.DOTALL,
)
_check_output = checker.check_output
@@ -201,7 +201,7 @@ class TimeoutDirective(Directive):
)
)
""",
- re.X | re.S,
+ re.VERBOSE | re.DOTALL,
)
def __init__(self, timeout):
@@ -239,7 +239,7 @@ class SingleProcessDirective(Directive):
\s
)
""",
- re.X | re.S,
+ re.VERBOSE | re.DOTALL,
)
def parse_directive(self, docstring):
@@ -276,7 +276,7 @@ class Fluid(BadStatement):
.*
(\bfluid\b)
""",
- re.X,
+ re.VERBOSE,
)
def check(self, docstring):
@@ -300,7 +300,7 @@ class SkipNoReason(BadStatement):
[+]SKIP
(?P