
[CodeStyle][PLR1701] unify multiple isinstance expressions as one (#5…
Kim authored Mar 28, 2023
1 parent c05feb9 commit c1838da
Showing 33 changed files with 123 additions and 171 deletions.
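Every hunk in this commit applies the same transformation: a chain of `isinstance(x, A) or isinstance(x, B)` checks on the same object is collapsed into a single `isinstance(x, (A, B))` call, since `isinstance` accepts a tuple of types and returns True when the object matches any of them. A minimal standalone sketch of the equivalence (hypothetical helper names, not taken from the Paddle sources):

```python
# Before: the pattern flagged by PLR1701 -- repeated isinstance() calls
# on the same object, joined with `or`.
def is_number_before(value):
    return isinstance(value, int) or isinstance(value, float)


# After: a single isinstance() call with a tuple of types. isinstance()
# returns True if `value` is an instance of any type in the tuple.
def is_number_after(value):
    return isinstance(value, (int, float))


if __name__ == "__main__":
    for sample in (3, 2.5, "3", None):
        # The two forms agree for every input.
        assert is_number_before(sample) == is_number_after(sample)
    print("chained and tuple forms are equivalent")
```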
2 changes: 1 addition & 1 deletion python/paddle/audio/functional/functional.py
@@ -250,7 +250,7 @@ def compute_fbank_matrix(
if norm == 'slaney':
enorm = 2.0 / (mel_f[2 : n_mels + 2] - mel_f[:n_mels])
weights *= enorm.unsqueeze(1)
elif isinstance(norm, int) or isinstance(norm, float):
elif isinstance(norm, (int, float)):
weights = paddle.nn.functional.normalize(weights, p=norm, axis=-1)

return weights
2 changes: 1 addition & 1 deletion python/paddle/distributed/auto_parallel/dist_op.py
@@ -323,7 +323,7 @@ def __call__(self, *args, **kwargs):
output = self._serial_op(*args, **kwargs)
new_op_size = len(cur_block.ops)

if isinstance(output, tuple) or isinstance(output, list):
if isinstance(output, (tuple, list)):
new_output = list(output)
elif isinstance(output, Variable):
new_output = [output]
4 changes: 1 addition & 3 deletions python/paddle/distributed/auto_parallel/reshard.py
@@ -1948,9 +1948,7 @@ def parse_op_desc(
)
idx = idx_list[0]

elif isinstance(op_desc, SliceOpDesc) or isinstance(
op_desc, AllGatherConcatOpDesc
):
elif isinstance(op_desc, (SliceOpDesc, AllGatherConcatOpDesc)):
target_tensor = None
if isinstance(op_desc, SliceOpDesc):
assert (
2 changes: 1 addition & 1 deletion python/paddle/distributed/fleet/base/util_factory.py
@@ -425,7 +425,7 @@ def _params_check(self, config):
def feed_gen(batch_size, feeded_vars_dims, feeded_vars_filelist):
def reader(batch_size, fn, dim):
data = []
if isinstance(dim, list) or isinstance(dim, tuple):
if isinstance(dim, (list, tuple)):
shape = list(dim)
_temp = 1
for x in dim:
@@ -45,11 +45,14 @@ def _can_apply(self):
if self.role_maker._worker_num() <= 1:
return False

return (
isinstance(self.inner_opt, paddle.optimizer.momentum.Momentum)
or isinstance(self.inner_opt, paddle.fluid.optimizer.Momentum)
or isinstance(self.inner_opt, paddle.optimizer.sgd.SGD)
or isinstance(self.inner_opt, paddle.fluid.optimizer.SGD)
return isinstance(
self.inner_opt,
(
paddle.optimizer.momentum.Momentum,
paddle.fluid.optimizer.Momentum,
paddle.optimizer.sgd.SGD,
paddle.fluid.optimizer.SGD,
),
)

def _disable_strategy(self, dist_strategy):
@@ -228,11 +231,14 @@ def _can_apply(self):
if self.role_maker._worker_num() <= 1:
return False

return (
isinstance(self.inner_opt, paddle.optimizer.Momentum)
or isinstance(self.inner_opt, paddle.fluid.optimizer.Momentum)
or isinstance(self.inner_opt, paddle.optimizer.sgd.SGD)
or isinstance(self.inner_opt, paddle.fluid.optimizer.SGD)
return isinstance(
self.inner_opt,
(
paddle.optimizer.Momentum,
paddle.fluid.optimizer.Momentum,
paddle.optimizer.sgd.SGD,
paddle.fluid.optimizer.SGD,
),
)

def _disable_strategy(self, dist_strategy):
@@ -795,9 +795,7 @@ def random_to_skip():
if isinstance(threshold, float):
atol = threshold
rtol = 1e-8
elif isinstance(threshold, list) or isinstance(
threshold, tuple
):
elif isinstance(threshold, (list, tuple)):
atol = threshold[0]
rtol = threshold[1]
else:
@@ -110,7 +110,7 @@ def setUp(self):
scale_w = 0

if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
scale_h = float(self.scale)
scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -104,7 +104,7 @@ def setUp(self):
scale_w = 0

if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
scale_h = float(self.scale)
scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
12 changes: 5 additions & 7 deletions python/paddle/fluid/tests/unittests/test_bicubic_interp_v2_op.py
@@ -37,17 +37,15 @@ def bicubic_interp_test(
align_corners=True,
align_mode=0,
):
if isinstance(scale, float) or isinstance(scale, int):
if isinstance(scale, (float, int)):
scale_list = []
for _ in range(len(x.shape) - 2):
scale_list.append(scale)
scale = list(map(float, scale_list))
elif isinstance(scale, list) or isinstance(scale, tuple):
elif isinstance(scale, (list, tuple)):
scale = list(map(float, scale))
if SizeTensor is not None:
if not isinstance(SizeTensor, list) and not isinstance(
SizeTensor, tuple
):
if not isinstance(SizeTensor, (list, tuple)):
SizeTensor = [SizeTensor]
return paddle._C_ops.bicubic_interp(
x,
@@ -197,7 +195,7 @@ def setUp(self):
in_w = self.input_shape[2]

if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0.0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -236,7 +234,7 @@ def setUp(self):
'data_layout': self.data_layout,
}
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0.0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
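The bicubic hunk above also rewrites the negated variant: by De Morgan's laws, `not isinstance(x, list) and not isinstance(x, tuple)` is equivalent to `not isinstance(x, (list, tuple))`. A small sketch of that form (the `ensure_sequence` name is hypothetical, not Paddle code):

```python
# Negated variant of the same rule: wrap a scalar in a list only when it is
# not already a list or tuple. Equivalent to the longer form
# `not isinstance(value, list) and not isinstance(value, tuple)`.
def ensure_sequence(value):
    if not isinstance(value, (list, tuple)):
        value = [value]
    return value


if __name__ == "__main__":
    assert ensure_sequence(5) == [5]          # scalar gets wrapped
    assert ensure_sequence([5, 6]) == [5, 6]  # list passes through
    assert ensure_sequence((5, 6)) == (5, 6)  # tuple passes through
    print("negated tuple check behaves like the chained version")
```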
@@ -37,12 +37,12 @@ def bilinear_interp_test(
align_corners=True,
align_mode=0,
):
if isinstance(scale, float) or isinstance(scale, int):
if isinstance(scale, (float, int)):
scale_list = []
for _ in range(len(x.shape) - 2):
scale_list.append(scale)
scale = list(map(float, scale_list))
elif isinstance(scale, list) or isinstance(scale, tuple):
elif isinstance(scale, (list, tuple)):
scale = list(map(float, scale))
if SizeTensor is not None:
if not isinstance(SizeTensor, list) and not isinstance(
@@ -169,7 +169,7 @@ def setUp(self):
scale_h = 0
scale_w = 0
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0.0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -210,7 +210,7 @@ def setUp(self):
'data_layout': self.data_layout,
}
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0.0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -363,7 +363,7 @@ def setUp(self):
).astype("uint8")

if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -400,7 +400,7 @@ def setUp(self):
'align_mode': self.align_mode,
}
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -537,7 +537,7 @@ def setUp(self):
if self.scale_by_1Dtensor:
self.inputs['Scale'] = np.array([self.scale]).astype("float32")
elif self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -564,7 +564,7 @@ def setUp(self):
self.attrs['out_h'] = self.out_h
self.attrs['out_w'] = self.out_w
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
11 changes: 3 additions & 8 deletions python/paddle/fluid/tests/unittests/test_desc_clone.py
@@ -114,9 +114,7 @@ def operator_equal(a, b):
raise ValueError("In operator_equal not equal\n")

for k, v in a.__dict__.items():
if isinstance(v, fluid.framework.Program) or isinstance(
v, fluid.framework.Block
):
if isinstance(v, (fluid.framework.Program, fluid.framework.Block)):
continue

elif isinstance(v, core.OpDesc):
@@ -137,13 +135,10 @@ def operator_equal(a, b):

def block_equal(a, b):
for k, v in a.__dict__.items():
if (
isinstance(v, core.ProgramDesc)
or isinstance(v, fluid.framework.Program)
or isinstance(v, core.BlockDesc)
if isinstance(
v, (core.ProgramDesc, fluid.framework.Program, core.BlockDesc)
):
continue

elif k == "ops":
assert len(a.ops) == len(b.ops)
for i in range(0, len(a.ops)):
16 changes: 8 additions & 8 deletions python/paddle/fluid/tests/unittests/test_linear_interp_v2_op.py
@@ -38,12 +38,12 @@ def linear_interp_test(
align_corners=True,
align_mode=0,
):
if isinstance(scale, float) or isinstance(scale, int):
if isinstance(scale, (float, int)):
scale_list = []
for _ in range(len(x.shape) - 2):
scale_list.append(scale)
scale = list(map(float, scale_list))
elif isinstance(scale, list) or isinstance(scale, tuple):
elif isinstance(scale, (list, tuple)):
scale = list(map(float, scale))
if SizeTensor is not None:
if not isinstance(SizeTensor, list) and not isinstance(
@@ -138,7 +138,7 @@ def setUp(self):
in_w = self.input_shape[1]

if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = float(self.scale)
if isinstance(self.scale, list):
self.scale = float(self.scale[0])
@@ -170,7 +170,7 @@ def setUp(self):
'data_layout': self.data_layout,
}
if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = [float(self.scale)]
self.attrs['scale'] = self.scale
self.outputs = {'Out': output_np}
@@ -262,7 +262,7 @@ def setUp(self):
in_w = self.input_shape[1]

if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = float(self.scale)
if isinstance(self.scale, list):
self.scale = float(self.scale[0])
@@ -302,7 +302,7 @@ def setUp(self):
'data_layout': self.data_layout,
}
if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
self.scale = [self.scale[0], self.scale[0]]
@@ -343,7 +343,7 @@ def setUp(self):
input_np = np.random.random(self.input_shape).astype("uint8")

if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = float(self.scale)
if isinstance(self.scale, list):
self.scale = float(self.scale[0])
@@ -371,7 +371,7 @@ def setUp(self):
'align_mode': self.align_mode,
}
if self.scale > 0:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
self.scale = [self.scale[0], self.scale[0]]
16 changes: 8 additions & 8 deletions python/paddle/fluid/tests/unittests/test_nearest_interp_v2_op.py
@@ -39,12 +39,12 @@ def nearest_interp_test(
align_corners=True,
align_mode=0,
):
if isinstance(scale, float) or isinstance(scale, int):
if isinstance(scale, (float, int)):
scale_list = []
for _ in range(len(x.shape) - 2):
scale_list.append(scale)
scale = list(map(float, scale_list))
elif isinstance(scale, list) or isinstance(scale, tuple):
elif isinstance(scale, (list, tuple)):
scale = list(map(float, scale))
if SizeTensor is not None:
if not isinstance(SizeTensor, list) and not isinstance(
@@ -233,7 +233,7 @@ def setUp(self):
scale_h = 0
scale_w = 0
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
scale_d = scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -305,7 +305,7 @@ def setUp(self):
'data_layout': self.data_layout,
}
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -438,7 +438,7 @@ def setUp(self):
).astype("uint8")

if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -472,7 +472,7 @@ def setUp(self):
'align_corners': self.align_corners,
}
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -583,7 +583,7 @@ def setUp(self):
if self.scale_by_1Dtensor:
self.inputs['Scale'] = np.array([self.scale]).astype("float64")
elif self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
scale_h = scale_w = float(self.scale)
if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -610,7 +610,7 @@ def setUp(self):
self.attrs['out_h'] = self.out_h
self.attrs['out_w'] = self.out_w
if self.scale:
if isinstance(self.scale, float) or isinstance(self.scale, int):
if isinstance(self.scale, (float, int)):
if self.scale > 0:
self.scale = [self.scale]
if isinstance(self.scale, list) and len(self.scale) == 1:
(The remaining changed files in this commit are not shown here.)

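PLR1701 appears to correspond to Pylint's consider-merging-isinstance (R1701) check, which these linters implement themselves. The sketch below is only a hypothetical illustration of how such repeated-`isinstance` chains can be found with Python's `ast` module; it requires Python 3.9+ for `ast.unparse` and is not the actual Pylint or Ruff implementation.

```python
# Hypothetical detector for `isinstance(x, A) or isinstance(x, B)` chains,
# in the spirit of Pylint R1701 / Ruff PLR1701. Illustrative only.
import ast


def find_repeated_isinstance(source, filename="<string>"):
    """Yield (lineno, expression) for `or`-chains that call isinstance()
    more than once on the same first argument."""
    tree = ast.parse(source, filename)
    for node in ast.walk(tree):
        # Only look at `a or b or ...` expressions.
        if not (isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or)):
            continue
        keys = []
        for operand in node.values:
            # Each operand must be a call to the isinstance() builtin
            # with exactly two positional arguments.
            if (
                isinstance(operand, ast.Call)
                and isinstance(operand.func, ast.Name)
                and operand.func.id == "isinstance"
                and len(operand.args) == 2
            ):
                # Use the dumped AST of the first argument as a comparison key.
                keys.append(ast.dump(operand.args[0]))
        # Report when at least two operands test the same object.
        if any(keys.count(k) >= 2 for k in set(keys)):
            yield node.lineno, ast.unparse(node)


if __name__ == "__main__":
    code = "if isinstance(x, int) or isinstance(x, float):\n    pass\n"
    for lineno, expr in find_repeated_isinstance(code):
        print(f"line {lineno}: consider merging: {expr}")
```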