Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Typing][A-27,A-28,A-32,A-33,A-35,A-36] Add type annotations for paddle/nn/initializer/* #65206

Merged
merged 3 commits into from
Jun 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 19 additions & 5 deletions python/paddle/nn/initializer/assign.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

from typing import TYPE_CHECKING, Any, Sequence

import paddle
from paddle import _C_ops

Expand All @@ -23,6 +28,9 @@
)
from .initializer import Initializer

if TYPE_CHECKING:
import numpy.typing as npt

__all__ = []


Expand All @@ -38,19 +46,21 @@ class NumpyArrayInitializer(Initializer):

"""

def __init__(self, value: npt.NDArray[Any]) -> None:
    """Create an initializer that fills tensors from a fixed numpy array.

    Args:
        value(numpy.ndarray): the constant data later assigned to every
            tensor this initializer is applied to.
    """
    # Imported lazily so importing this module does not require numpy.
    import numpy as _np

    assert isinstance(value, _np.ndarray)
    super().__init__()
    self._value = value

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: paddle.pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Numpy array.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
Expand Down Expand Up @@ -172,7 +182,7 @@ class Assign(NumpyArrayInitializer):

Args:
value (Tensor|numpy.ndarray|list|tuple): numpy array, list, tuple, or tensor to initialize the parameter.
name(str, optional): Normally there is no need for user to set this
name(str|None, optional): Normally there is no need for user to set this
property. For more information, please refer to :ref:`api_guide_Name`. Default is None.

Returns:
Expand Down Expand Up @@ -239,7 +249,11 @@ class Assign(NumpyArrayInitializer):
[6.]
"""

def __init__(self, value, name=None):
def __init__(
self,
value: npt.NDArray[Any] | Sequence[int] | paddle.Tensor,
name: str | None = None,
) -> None:
import numpy

check_type(
Expand Down
11 changes: 8 additions & 3 deletions python/paddle/nn/initializer/bilinear.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import numpy as np

import paddle
from paddle import _C_ops, pir

from ...base import core, framework, unique_name
Expand Down Expand Up @@ -74,16 +77,18 @@ class Bilinear(Initializer):

"""

def __init__(self) -> None:
    """Set up the bilinear initializer; it keeps no state of its own."""
    super().__init__()

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Bilinear initialization.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/nn/initializer/constant.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def forward(
self,
var: paddle.Tensor,
block: paddle.pir.Block | None = None,
):
) -> paddle.Tensor | None:
"""Initialize the input tensor with constant.

Args:
Expand Down
22 changes: 20 additions & 2 deletions python/paddle/nn/initializer/initializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,10 @@

import functools
import math
from typing import TYPE_CHECKING, Literal

import numpy as np
from typing_extensions import TypeAlias

import paddle

Expand All @@ -28,6 +30,22 @@
)
from .lazy_init import lazy_init_helper

if TYPE_CHECKING:
    # Accepted values for the ``nonlinearity`` argument of the MSRA/Kaiming
    # initializers (see kaiming.py); exists only for static type checkers,
    # hence the TYPE_CHECKING guard and the PYI047 suppression.
    _NonLinearity: TypeAlias = Literal[  # noqa: PYI047
        "sigmoid",
        "linear",
        "conv1d",
        "conv2d",
        "conv3d",
        "conv1d_transpose",
        "conv2d_transpose",
        "conv3d_transpose",
        "tanh",
        "relu",
        "leaky_relu",
        "selu",
    ]

__all__ = []


Expand All @@ -40,7 +58,7 @@ class Initializer:
directly, but need to use one of its implementations.
"""

def __init__(self) -> None:
    """The abstract base initializer holds no state; nothing to set up."""

def __call__(
Expand All @@ -53,7 +71,7 @@ def __call__(

def forward(
    self, param: paddle.Tensor, block: paddle.pir.Block | None = None
) -> paddle.Tensor | None:
    """Emit the initialization ops for ``param``.

    Subclasses must override this; the base class only defines the
    contract and always raises.
    """
    raise NotImplementedError()

Expand Down
38 changes: 28 additions & 10 deletions python/paddle/nn/initializer/kaiming.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

# TODO: define the initializers of Kaiming functions in neural network
import math
from typing import TYPE_CHECKING

import paddle
from paddle import _C_ops
Expand All @@ -26,6 +29,9 @@
)
from .initializer import Initializer, calculate_gain

if TYPE_CHECKING:
from .initializer import _NonLinearity

__all__ = []


Expand Down Expand Up @@ -64,12 +70,12 @@ class MSRAInitializer(Initializer):

def __init__(
self,
uniform=True,
fan_in=None,
seed=0,
negative_slope=0,
nonlinearity='relu',
):
uniform: bool = True,
fan_in: float | None = None,
seed: int = 0,
negative_slope: float = 0,
nonlinearity: _NonLinearity = 'relu',
) -> None:
"""Constructor for MSRAInitializer"""
assert uniform is not None
assert seed is not None
Expand All @@ -80,12 +86,14 @@ def __init__(
self._negative_slope = negative_slope
self._nonlinearity = nonlinearity

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: paddle.pir.Block | None = None
) -> paddle.Tensor | None:
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

这个返回值，基类以及标注过的几个文件也都统一一下吧 (Translation: please also make this return annotation consistent in the base class and in the files that have already been annotated.)

"""Initialize the input tensor with MSRA initialization.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
Expand Down Expand Up @@ -271,7 +279,12 @@ class KaimingNormal(MSRAInitializer):

"""

def __init__(self, fan_in=None, negative_slope=0.0, nonlinearity='relu'):
def __init__(
self,
fan_in: float | None = None,
negative_slope: float = 0.0,
nonlinearity: str = 'relu',
) -> None:
super().__init__(
uniform=False,
fan_in=fan_in,
Expand Down Expand Up @@ -317,7 +330,12 @@ class KaimingUniform(MSRAInitializer):

"""

def __init__(self, fan_in=None, negative_slope=0.0, nonlinearity='relu'):
def __init__(
self,
fan_in: float | None = None,
negative_slope: float = 0.0,
nonlinearity: str = 'relu',
) -> None:
super().__init__(
uniform=True,
fan_in=fan_in,
Expand Down
45 changes: 35 additions & 10 deletions python/paddle/nn/initializer/normal.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import paddle
from paddle import _C_ops, pir

from ...base import core, framework, unique_name
Expand All @@ -36,7 +39,9 @@ class NormalInitializer(Initializer):

"""

def __init__(self, loc=0.0, scale=1.0, seed=0):
def __init__(
self, loc: float = 0.0, scale: float = 1.0, seed: int = 0
) -> None:
assert loc is not None
assert scale is not None
assert seed is not None
Expand All @@ -45,12 +50,14 @@ def __init__(self, loc=0.0, scale=1.0, seed=0):
self._std_dev = scale
self._seed = seed

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Normal distribution.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
Expand Down Expand Up @@ -119,7 +126,7 @@ class Normal(NormalInitializer):
Args:
mean (float, optional): mean of the normal distribution. Default is 0.0.
std (float, optional): standard deviation of the normal distribution. Default is 1.0.
name(str, optional): The default value is None. Normally there is no need for user to set this
name(str|None, optional): The default value is None. Normally there is no need for user to set this
property. For more information, please refer to :ref:`api_guide_Name`. Default: None.

Returns:
Expand Down Expand Up @@ -156,7 +163,9 @@ class Normal(NormalInitializer):
[[ 1.0754838 -4.071067 ]]])
"""

def __init__(self, mean=0.0, std=1.0, name=None):
def __init__(
    self, mean: float = 0.0, std: float = 1.0, name: str | None = None
) -> None:
    """Normal-distribution initializer with the given mean and std.

    ``name`` is accepted for API uniformity; the seed is fixed at 0.
    """
    assert mean is not None, 'mean should not be None'
    assert std is not None, 'std should not be None'
    super().__init__(loc=mean, scale=std, seed=0)
Expand All @@ -178,7 +187,14 @@ class TruncatedNormalInitializer(Initializer):

"""

def __init__(self, loc=0.0, scale=1.0, seed=0, a=-2.0, b=2.0):
def __init__(
self,
loc: float = 0.0,
scale: float = 1.0,
seed: int = 0,
a: float = -2.0,
b: float = 2.0,
) -> None:
assert loc is not None
assert scale is not None
assert seed is not None
Expand All @@ -191,12 +207,14 @@ def __init__(self, loc=0.0, scale=1.0, seed=0, a=-2.0, b=2.0):
self._a = a
self._b = b

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with TruncatedNormal distribution.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
Expand Down Expand Up @@ -289,7 +307,7 @@ class TruncatedNormal(TruncatedNormalInitializer):
std (float, optional): Standard deviation of the normal distribution. Default is :math:`1.0`.
a (float, optional): The minimum cutoff value. Default is -2.0.
b (float, optional): The maximum cutoff value. Default is 2.0.
name (str, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.
name (str|None, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.

Returns:
A parameter initialized by truncated normal distribution (Gaussian distribution).
Expand Down Expand Up @@ -325,7 +343,14 @@ class TruncatedNormal(TruncatedNormalInitializer):
[[-0.11380529 -3.0696259 ]]])
"""

def __init__(self, mean=0.0, std=1.0, a=-2.0, b=2.0, name=None):
def __init__(
self,
mean: float = 0.0,
std: float = 1.0,
a: float = -2.0,
b: float = 2.0,
name: str | None = None,
) -> None:
assert mean is not None, 'mean should not be None'
assert std is not None, 'std should not be None'
assert a is not None, 'a should not be None'
Expand Down
Loading