Commit
[cherry-pick2.4]for CodeStyle (PaddlePaddle#47608)
* only run pre-commit

* only run pre-commit
Ligoml authored Nov 4, 2022
1 parent 99c872f commit cfee9c1
Showing 57 changed files with 13,869 additions and 9,290 deletions.
112 changes: 55 additions & 57 deletions python/paddle/autograd/py_layer.py
@@ -54,16 +54,16 @@ def __init__(self):
def save_for_backward(self, *tensors):
"""
Saves given tensors that `backward` needs. Use ``saved_tensor`` in `backward` to get the saved tensors.

.. note::
    This API should be called at most once, and only inside `forward`.
Args:
tensors(list of Tensors): Tensors to be stored.
Returns:
None
Examples:
.. code-block:: python
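    # The original example is collapsed in this diff view; a minimal
    # sketch of typical usage (shapes and names illustrative).
    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            y = paddle.tanh(x)
            # Stash y so backward can retrieve it via saved_tensor().
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            (y,) = ctx.saved_tensor()
            return dy * (1 - paddle.square(y))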
@@ -94,7 +94,7 @@ def saved_tensor(self):
Get the tensors stored by ``save_for_backward``.
Returns:
list of Tensors or None: If context contains tensors stored by `save_for_backward`,
then return these tensors, otherwise return None.
Examples:
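.. code-block:: python

    # The example body is collapsed in this diff; a minimal sketch,
    # assuming a layer whose forward already called ctx.save_for_backward(y).
    @staticmethod
    def backward(ctx, dy):
        # saved_tensor() returns the tensors stored in forward,
        # or None if nothing was saved.
        (y,) = ctx.saved_tensor()
        return dy * (1 - paddle.square(y))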
@@ -124,17 +124,14 @@ def backward(ctx, dy):


def with_mateclass(meta, *bases):
    class impl(meta):
        def __new__(cls, name, temp_bases, attrs):
            return meta(name, bases, attrs)

    return type.__new__(impl, "impl", (), {})


class CPyLayer(object):
    @classmethod
    @dygraph_only
    def apply(cls, *args, **kwargs):
@@ -147,7 +144,7 @@ def apply(cls, *args, **kwargs):
Returns:
tensors or other types : output of PyLayer.
Examples:
.. code-block:: python
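    # Collapsed in this diff view; a hedged sketch of calling a custom
    # layer through apply (the func1 keyword and shapes are illustrative).
    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x, func1, func2=paddle.square):
            ctx.func = func2
            y = func1(x)
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            (y,) = ctx.saved_tensor()
            return dy * (1 - ctx.func(y))

    data = paddle.randn([2, 3], dtype="float64")
    data.stop_gradient = False
    # Positional and keyword arguments are forwarded to forward().
    z = cus_tanh.apply(data, func1=paddle.tanh)
    z.mean().backward()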
@@ -182,12 +179,14 @@ def backward(ctx, dy):


class PyLayerBackward(LegacyPyLayerContext):
    def backward(self, *args, **kwargs):
        with paddle.fluid.dygraph.guard():
            with paddle.fluid.dygraph.no_grad():
                if (
                    self._amp_state
                    and 'enable' in self._amp_state
                    and self._amp_state['enable']
                ):
                    with auto_cast(**args[0]._amp_state):
                        return self._forward_cls.backward(*args, **kwargs)
                else:
@@ -197,10 +196,10 @@ def backward(self, *args, **kwargs):


class LayerMeta(type):
    def __init__(cls, name, bases, attrs):
        cls._backward_function = type(
            name + '_backward', (PyLayerBackward,), {"_forward_cls": cls}
        )

        return super(LayerMeta, cls).__init__(name, bases, attrs)

@@ -210,15 +209,15 @@ class LegacyPyLayer(with_mateclass(LayerMeta, CPyLayer)):
Build a custom `Layer` by creating subclasses. Subclasses need to follow the following rules:

1. Subclasses contain `forward` and `backward` functions. Both forward and backward are @staticmethod.
Their first argument should be a context, and `None` cannot be included in the returned result.

2. Input of backward contains a context as the first argument, and the remaining arguments are the
gradients of forward's output tensors, so the number of backward's input tensors equals
the number of forward output tensors. If you need forward's inputs or outputs in `backward`,
you can use `save_for_backward` to store the required tensors, and then use them in backward.

3. Output of the backward function can only be `Tensor` or tuple/list of `Tensor`.
Output tensors of backward are the gradients of forward's input tensors,
so the number of backward's output tensors equals the number of forward input tensors.

After building the custom Layer, run it through the `apply` method.
Examples:
.. code-block:: python
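    # Collapsed in this diff view; a representative sketch of the rules
    # above (shapes and names illustrative).
    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            y = paddle.tanh(x)
            ctx.save_for_backward(y)  # rule 2: stash tensors for backward
            return y

        @staticmethod
        def backward(ctx, dy):
            (y,) = ctx.saved_tensor()
            # rule 3: one gradient per forward input tensor
            return dy * (1 - paddle.square(y))

    data = paddle.randn([2, 3], dtype="float32")
    data.stop_gradient = False
    z = cus_tanh.apply(data)  # run the custom layer via apply
    z.mean().backward()
    print(data.grad.shape)  # [2, 3]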
@@ -259,8 +258,8 @@ def backward(ctx, dy):
@staticmethod
def forward(ctx, *args, **kwargs):
"""
It is to be overloaded by subclasses. It must accept an object of `PyLayerContext` as
the first argument, followed by any number of arguments (tensors or other types).
`None` cannot be included in the returned result.
Args:
@@ -269,7 +268,7 @@ def forward(ctx, *args, **kwargs):
Returns:
tensors or other types : output of PyLayer.
Examples:
.. code-block:: python
Expand All @@ -292,14 +291,15 @@ def backward(ctx, dy):
return grad
"""
raise NotImplementedError(
    "You must implement the forward function for PyLayer."
)

@staticmethod
def backward(ctx, *args, **kwargs):
"""
This is a function to calculate the gradient. It is to be overloaded by subclasses.
It must accept an object of `PyLayerContext` as the first argument, and the remaining
arguments are the gradients of forward's output tensors. Output tensors of backward
are the gradients of forward's input tensors.
Args:
@@ -308,7 +308,7 @@ def backward(ctx, *args, **kwargs):
Returns:
Tensor or list of Tensors: The gradient of forward's input tensor(s).
Examples:
.. code-block:: python
@@ -332,24 +332,24 @@ def backward(ctx, dy):
"""

raise NotImplementedError(
    "You must implement the backward function for PyLayer."
)


class EagerPyLayerContext(object):
    def save_for_backward(self, *tensors):
        """
Saves given tensors that `backward` needs. Use ``saved_tensor`` in `backward` to get the saved tensors.

.. note::
    This API should be called at most once, and only inside `forward`.
Args:
tensors(list of Tensors): Tensors to be stored.
Returns:
None
Examples:
.. code-block:: python
@@ -380,7 +380,7 @@ def saved_tensor(self):
Get the tensors stored by ``save_for_backward``.
Returns:
list of Tensors or None: If context contains tensors stored by `save_for_backward`,
then return these tensors, otherwise return None.
Examples:
@@ -410,11 +410,11 @@ def backward(ctx, dy):
def mark_not_inplace(self, *args):
"""
Marks inputs as not inplace.

This should be called at most once, only from inside the `forward` method,
and all arguments should be Tensor inputs.

If the Tensor returned by the `forward` method is the same as a Tensor input of forward,
and this Tensor is marked as not_inplace, then Paddle will help the user create a new Tensor as output,
thereby preventing the auto grad information of the input Tensor from being overwritten.
Examples:
@@ -427,7 +427,7 @@ class Exp(paddle.autograd.PyLayer):
def forward(ctx, x):
    ctx.mark_not_inplace(x)
    return x

@staticmethod
def backward(ctx, grad_output):
    out = grad_output.exp()
@@ -438,7 +438,7 @@ def backward(ctx, grad_output):
attn_layers = []
for idx in range(0, 2):
    attn_layers.append(Exp())

for step in range(0, 2):
    a = x
    for j in range(0, 2):
@@ -450,7 +450,7 @@ def backward(ctx, grad_output):
def mark_non_differentiable(self, *args):
"""
Marks outputs as non-differentiable.

This should be called at most once, only from inside the `forward` method,
and all arguments should be tensor outputs.

This will mark outputs as not requiring gradients, increasing the
@@ -542,30 +542,27 @@ def backward(ctx, grad, grad2):


class EagerPyLayerBackward(core.eager.PyLayer, EagerPyLayerContext):
    def backward(self, *args):
        return self._forward_cls.backward(self, *args)


class EagerPyLayerMeta(type):
    def __init__(cls, name, bases, attrs):
        cls._backward_function = type(
            name + '_backward', (EagerPyLayerBackward,), {"_forward_cls": cls}
        )

        return super(EagerPyLayerMeta, cls).__init__(name, bases, attrs)


class EagerPyLayer(
    with_mateclass(EagerPyLayerMeta, core.eager.PyLayer, EagerPyLayerContext)
):
    @staticmethod
    def forward(ctx, *args, **kwargs):
"""
It is to be overloaded by subclasses. It must accept an object of `PyLayerContext` as
the first argument, followed by any number of arguments (tensors or other types).
`None` cannot be included in the returned result.
Args:
@@ -574,7 +571,7 @@ def forward(ctx, *args, **kwargs):
Returns:
tensors or other types : output of PyLayer.
Examples:
.. code-block:: python
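    # Collapsed in this diff view; a hedged sketch of a forward that
    # takes two tensor inputs (MulAdd is an illustrative name).
    import paddle
    from paddle.autograd import PyLayer

    class MulAdd(PyLayer):
        @staticmethod
        def forward(ctx, x, y):
            ctx.save_for_backward(x, y)
            return x * y + y

        @staticmethod
        def backward(ctx, dz):
            x, y = ctx.saved_tensor()
            # One gradient per forward input, in input order:
            # d(x*y + y)/dx = y, d(x*y + y)/dy = x + 1.
            return dz * y, dz * (x + 1)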
@@ -597,14 +594,15 @@ def backward(ctx, dy):
return grad
"""
raise NotImplementedError(
    "You must implement the forward function for PyLayer."
)

@staticmethod
def backward(ctx, *args):
"""
This is a function to calculate the gradient. It is to be overloaded by subclasses.
It must accept an object of `PyLayerContext` as the first argument, and the remaining
arguments are the gradients of forward's output tensors. Output tensors of backward
are the gradients of forward's input tensors.
Args:
@@ -613,7 +611,7 @@ def backward(ctx, *args):
Returns:
Tensor or list of Tensors: The gradient of forward's input tensor(s).
Examples:
.. code-block:: python
@@ -637,11 +635,11 @@ def backward(ctx, dy):
"""

raise NotImplementedError(
    "You must implement the backward function for PyLayer."
)


def once_differentiable(backward):
    def wrapper(ctx, *args):
        with paddle.fluid.dygraph.no_grad():
            outputs = backward(ctx, *args)
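The remainder of the wrapper is collapsed in this diff. For reference, a minimal sketch of how a decorator like `once_differentiable` is typically applied, assuming it can be imported from this module (the layer below is illustrative, not part of this commit):

class Tanh(PyLayer):
    @staticmethod
    def forward(ctx, x):
        y = paddle.tanh(x)
        ctx.save_for_backward(y)
        return y

    @staticmethod
    @once_differentiable  # wraps backward so it runs under no_grad
    def backward(ctx, dy):
        (y,) = ctx.saved_tensor()
        return dy * (1 - paddle.square(y))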
