[CodeStyle][UP008] remove super call with parameters #51810

Closed · wants to merge 5 commits
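This PR enables two pyupgrade rules in ruff, UP004 (useless `object` inheritance) and UP008 (`super` call with parameters), and applies their autofixes across the repository. As a quick orientation, here is a minimal sketch of the rewrite the two rules perform; the class names below are made up for illustration and do not come from this PR:

```python
# Before: Python 2 era idioms, still valid on Python 3 but redundant.
class BaseLayer(object):  # UP004: the explicit `object` base class is implicit on Python 3
    def __init__(self, name):
        self.name = name


class MyLayer(BaseLayer):
    def __init__(self, name):
        super(MyLayer, self).__init__(name)  # UP008: the class/instance arguments are redundant


# After: the equivalent Python 3 spelling that the autofix produces.
class BaseLayer:
    def __init__(self, name):
        self.name = name


class MyLayer(BaseLayer):
    def __init__(self, name):
        super().__init__(name)
```

Both spellings behave the same at runtime; the zero-argument `super()` form relies on the `__class__` cell the compiler provides inside methods, so it is only valid within a method body defined in the class.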
10 changes: 5 additions & 5 deletions paddle/fluid/imperative/README.md
@@ -14,7 +14,7 @@ https://www.tensorflow.org/guide/eager

## API
```python
-class Layer(object):
+class Layer:

def __call__(inputs):
# build some parameter once.
@@ -49,7 +49,7 @@ Longer term.
```python

# Parent class.
-class PyVarBase(object):
+class PyVarBase:
pass

# Current python variable.
@@ -139,7 +139,7 @@ TODO
import paddle
class MyLayer(fluid.imperative.Layer):
def __init__(self):
-super(MyLayer, self).__init__()
+super().__init__()

def forward(self, inputs):
x = fluid.layers.relu(inputs)
@@ -150,7 +150,7 @@ class MyLayer(fluid.imperative.Layer):

class MyPyLayer(fluid.imperative.PyLayer):
def __init__(self):
-super(MyPyLayer, self).__init__()
+super().__init__()

@staticmethod
def forward(inputs):
@@ -172,7 +172,7 @@ with fluid.imperative.guard():

class MLP(fluid.Layer):
def __init__(self, input_size):
-super(MLP, self).__init__()
+super().__init__()
self._linear1 = Linear(input_size,
3,
fluid.ParamAttr(
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -35,7 +35,9 @@ select = [
# Pyupgrade
"UP001",
"UP003",
"UP004",
"UP007",
"UP008",
"UP010",
"UP011",
"UP013",
8 changes: 4 additions & 4 deletions python/paddle/device/__init__.py
@@ -523,7 +523,7 @@ def get_available_custom_device():
return core.get_available_custom_device()


-class Event(object):
+class Event:
'''
A device event wrapper around StreamBase.
Parameters:
@@ -668,7 +668,7 @@ def __repr__(self):
return self.event_base


-class Stream(object):
+class Stream:
'''
A device stream wrapper around StreamBase.
Parameters:
@@ -840,7 +840,7 @@ def _as_parameter_(self):

def __eq__(self, o):
if isinstance(o, Stream):
-return super(Stream, self).__eq__(o)
+return super().__eq__(o)
return False

def __hash__(self):
@@ -936,7 +936,7 @@ def set_stream(stream):
return prev_stream


-class stream_guard(object):
+class stream_guard:
'''
Notes:
This API only supports dynamic graph mode currently.
2 changes: 1 addition & 1 deletion python/paddle/distributed/fleet/base/runtime_factory.py
@@ -17,7 +17,7 @@
__all__ = []


-class RuntimeFactory(object):
+class RuntimeFactory:
def __init__(self):
pass

2 changes: 1 addition & 1 deletion python/paddle/distributed/fleet/recompute/recompute.py
@@ -379,7 +379,7 @@ class Naive_fc_net(paddle.nn.Layer):
def __init__(self, input_size=10,
recompute_blocks=[1, 3],
recompute_kwargs={}):
-super(Naive_fc_net, self).__init__()
+super().__init__()
self.recompute_blocks = recompute_blocks
self.recompute_kwargs = recompute_kwargs
self.runfunc0 = get_fc_block(0, input_size, is_last=False)
2 changes: 1 addition & 1 deletion python/paddle/distributed/passes/auto_parallel_bf16.py
@@ -45,7 +45,7 @@
world_process_group = get_world_process_group()


-class BF16State(object):
+class BF16State:
def __init__(self, block):
self._block: Block = block
self._op_bf16_dict = {}
4 changes: 2 additions & 2 deletions python/paddle/distributed/passes/auto_parallel_sharding.py
@@ -1787,7 +1787,7 @@ def group_param(sharding_info, fuse_size):
return group_to_param_map, param_to_group_map


-class ShardingInfo(object):
+class ShardingInfo:
def __init__(self, group, rank, params_grads, partition_algor):
self.group = group
self.params_grads = dict([(p.name, (p, g)) for p, g in params_grads])
@@ -1869,7 +1869,7 @@ def get_param_grad(self, param_name):
return self.params_grads.get(param_name, None)


-class VarGroup(object):
+class VarGroup:
def __init__(self, max_size):
self.max_siez = max_size
self.dtype = None
(another file; path not shown)
@@ -61,7 +61,7 @@ def forward(self, inputs):

class Encoder(paddle.nn.Layer):
def __init__(self, encoder):
-super(Encoder, self).__init__()
+super().__init__()
self.first_stage = paddle.nn.Linear(1024, 1024)
self.encoder = encoder

@@ -73,7 +73,7 @@ def forward(self, x):

class Decoder(paddle.nn.Layer):
def __init__(self, decoder):
-super(Decoder, self).__init__()
+super().__init__()
self.decoder = decoder
self.final_stage = paddle.nn.Linear(1024, 1024)
self.group_norm = paddle.nn.GroupNorm(64, 1024)
@@ -87,7 +87,7 @@ def forward(self, x):

class SpecialModel(paddle.nn.Layer):
def __init__(self):
-super(SpecialModel, self).__init__()
+super().__init__()
self.shared = paddle.nn.Linear(1024, 1024, bias_attr=False)
self.encoder = Encoder(self.shared)
self.decoder = Decoder(self.shared)
(another file; path not shown)
@@ -25,7 +25,7 @@

class Model(nn.Layer):
def __init__(self):
-super(Model, self).__init__()
+super().__init__()
self.first_stage = nn.Linear(4096, 4096, bias_attr=False)
self.center_stage = nn.Linear(4096, 4096)
self.center_stage.weight.stop_gradient = True
(another file; path not shown)
@@ -78,7 +78,7 @@ def __init__(
),
LayerDesc(Linear, in_features=linear_size, out_features=10),
]
-super(MLP_pipe, self).__init__(
+super().__init__(
desc,
num_stages=2,
loss_fn=paddle.nn.CrossEntropyLoss(),
@@ -94,7 +94,7 @@ def __init__(
param_attr=None,
bias_attr=None,
):
-super(MLP_Hybrid, self).__init__()
+super().__init__()
self.embedding = VocabParallelEmbedding(embedding_size, linear_size)
self._linear1 = RowParallelLinear(
linear_size, linear_size, has_bias=True, input_is_parallel=True
@@ -129,7 +129,7 @@ def __init__(
param_attr=None,
bias_attr=None,
):
-super(MLP, self).__init__()
+super().__init__()
self.embedding = paddle.nn.Embedding(embedding_size, linear_size)
self._linear1 = Linear(linear_size, linear_size)
self._linear2 = Linear(linear_size, linear_size)
(another file; path not shown)
@@ -35,7 +35,7 @@ def __init__(
pre_ln=True,
attn_dropout=True,
):
-super(MultiHeadAttentionWithMP, self).__init__()
+super().__init__()
self.embed_dim = embed_dim
self.kdim = embed_dim
self.vdim = embed_dim
(another file; path not shown)
@@ -23,7 +23,7 @@

class Model(paddle.nn.Layer):
def __init__(self, block_idx, input_size, is_last=False):
-super(Model, self).__init__()
+super().__init__()
block_name = "block_" + str(block_idx)
self.block = paddle.nn.Sequential(
(
(another file; path not shown)
@@ -37,7 +37,7 @@ def verify_op_count(op_types, op_name, target_count):

class MultiFCLayer(nn.Layer):
def __init__(self, hidden, Activation):
-super(MultiFCLayer, self).__init__()
+super().__init__()
self.linear1 = paddle.nn.Linear(hidden, 4 * hidden)
self.linear2 = paddle.nn.Linear(4 * hidden, hidden)
self.linear3 = paddle.nn.Linear(hidden, hidden)
(another file; path not shown)
@@ -21,7 +21,7 @@

class Net(paddle.nn.Layer):
def __init__(self):
-super(Net, self).__init__()
+super().__init__()

@paddle.jit.to_static
def forward(self, x):
(another file; path not shown)
@@ -21,7 +21,7 @@

class Net(paddle.nn.Layer):
def __init__(self):
-super(Net, self).__init__()
+super().__init__()
self.relu = paddle.nn.functional.relu
self.fc = paddle.nn.Linear(4, 4)

(another file; path not shown)
@@ -29,7 +29,7 @@ def apply_to_static(net, use_cinn):

class PrimeNet(paddle.nn.Layer):
def __init__(self):
-super(PrimeNet, self).__init__()
+super().__init__()
self.fc = paddle.nn.Linear(4, 4)

def forward(self, x):
(another file; path not shown)
@@ -42,7 +42,7 @@ def generate_data(shape, dtype="float32"):

class PrimeNet(paddle.nn.Layer):
def __init__(self, approximate):
-super(PrimeNet, self).__init__()
+super().__init__()
self.fc = paddle.nn.Linear(4, 4)
self.approximate = approximate

(another file; path not shown)
@@ -43,7 +43,7 @@ def apply_to_static(net, use_cinn):

class PrimeNet(paddle.nn.Layer):
def __init__(self):
-super(PrimeNet, self).__init__()
+super().__init__()
self.fc = paddle.nn.Linear(64, 64)

def forward(self, x, w, b):
(another file; path not shown)
@@ -46,7 +46,7 @@ class PrimeNet(
paddle.nn.Layer,
):
def __init__(self):
-super(PrimeNet, self).__init__()
+super().__init__()
self.fc = paddle.nn.Linear(4, 4)

def forward(self, x):
(another file; path not shown)
@@ -218,7 +218,7 @@ def set_func(self):

class NotToStaticHelper(paddle.nn.Layer):
def __init__(self):
-super(NotToStaticHelper, self).__init__()
+super().__init__()

def sum(self, x):
if x.shape[0] > 1:
(another file; path not shown)
@@ -335,7 +335,7 @@ def func(self, x):

class Net2:
def __init__(self):
-super(Net2, self).__init__()
+super().__init__()
self.layer1 = paddle.nn.Linear(10, 10)

def forward(self, data):
(another file; path not shown)
@@ -33,7 +33,7 @@ def apply_to_static(support_to_static, model, image_shape=None):

class Layer0(nn.Layer):
def __init__(self, level):
-super(Layer0, self).__init__()
+super().__init__()
self._linear1 = nn.Linear(10, 5)
self._linear2 = nn.Linear(10, 5)
self.layer1 = Layer1(level)
@@ -51,7 +51,7 @@ def forward(self, x):

class Layer1(nn.Layer):
def __init__(self, level):
-super(Layer1, self).__init__()
+super().__init__()
self.level = level
self._linear = nn.Linear(5, 2)

(another file; path not shown)
@@ -23,7 +23,7 @@

class SimpleLayer(paddle.nn.Layer):
def __init__(self):
-super(SimpleLayer, self).__init__()
+super().__init__()
self.conv = paddle.nn.Conv2D(
in_channels=3, out_channels=1, kernel_size=2, stride=1
)
(another file; path not shown)
@@ -55,7 +55,7 @@ def getdtype(dtype="float32"):

class BackendPaddle:
def __init__(self):
-super(BackendPaddle, self).__init__()
+super().__init__()
self.h2d_time = []
self.compute_time = []
self.d2h_time = []
@@ -341,7 +341,7 @@ def forward(self, inputs):

class Test(nn.Layer):
def __init__(self):
-super(Test, self).__init__()
+super().__init__()
self.conv = ConvBNLayer(
num_channels=3, num_filters=64, filter_size=3, stride=2, act='relu'
)
(another file; path not shown)
@@ -27,7 +27,7 @@

class SimpleNet(nn.Layer):
def __init__(self):
-super(SimpleNet, self).__init__()
+super().__init__()
self.conv1 = nn.Conv2D(
in_channels=4,
out_channels=4,
(another file; path not shown)
@@ -350,7 +350,7 @@ def apply_to_static(net, use_cinn):

class PrimeNet(paddle.nn.Layer):
def __init__(self):
-super(PrimeNet, self).__init__()
+super().__init__()
self.conv = nn.Conv2D(4, 2, (3, 3), bias_attr=False)
self.bn = BatchNorm(2, act="relu")
self.run_mean = zeros([2])
(another file; path not shown)
@@ -212,7 +212,7 @@ def apply_to_static(net, use_cinn):

class PrimeNet(paddle.nn.Layer):
def __init__(self, n_shape):
-super(PrimeNet, self).__init__()
+super().__init__()
self.ln = LayerNorm(n_shape)

def forward(self, x):
(another file; path not shown)
@@ -137,7 +137,7 @@ def apply_to_static(net, use_cinn):

class PrimeNet(paddle.nn.Layer):
def __init__(self):
-super(PrimeNet, self).__init__()
+super().__init__()
self.sf = F.softmax

def forward(self, x, current_axis):