[Typing][A-27,A-28,A-32,A-33,A-35,A-36] Add type annotations for paddle/nn/initializer/* #65206

Merged
merged 3 commits on Jun 16, 2024
24 changes: 19 additions & 5 deletions python/paddle/nn/initializer/assign.py
@@ -11,6 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

from typing import TYPE_CHECKING

import paddle
from paddle import _C_ops

@@ -23,6 +28,9 @@
)
from .initializer import Initializer

if TYPE_CHECKING:
import numpy
Reviewer (Member) suggested change:
-import numpy
+import numpy.typing as npt


__all__ = []


@@ -38,19 +46,21 @@ class NumpyArrayInitializer(Initializer):

"""

def __init__(self, value):
def __init__(self, value: numpy.ndarray):
Reviewer (Member) suggested change:
-    def __init__(self, value: numpy.ndarray):
+    def __init__(self, value: npt.NDArray[Any]):

Reviewer (Member): Please also annotate the return type of __init__; they all return None.
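For illustration, a minimal sketch of the constructor with both suggestions applied (not the code merged in this PR):

from __future__ import annotations

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    import numpy.typing as npt  # only needed by the type checker


class NumpyArrayInitializer:
    def __init__(self, value: npt.NDArray[Any]) -> None:
        # npt.NDArray[Any] accepts an ndarray of any dtype, and the explicit
        # -> None makes the constructor's return type visible to the checker.
        self._value = value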

import numpy

assert isinstance(value, numpy.ndarray)
super().__init__()
self._value = value

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: paddle.pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Numpy array.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
@@ -172,7 +182,7 @@ class Assign(NumpyArrayInitializer):

Args:
value (Tensor|numpy.ndarray|list|tuple): numpy array, list, tuple, or tensor to initialize the parameter.
name(str, optional): Normally there is no need for user to set this
name(str|None, optional): Normally there is no need for user to set this
property. For more information, please refer to :ref:`api_guide_Name`. Default is None.

Returns:
@@ -239,7 +249,11 @@ class Assign(NumpyArrayInitializer):
[6.]
"""

def __init__(self, value, name=None):
def __init__(
self,
value: numpy.ndarray | list[int] | tuple[int] | paddle.Tensor,
Reviewer (Member): tuple[int] describes a one-element tuple such as (1,). For a variable-length tuple you should use tuple[int, ...], and when both list[T] and tuple[T, ...] are accepted it is more common to simply write Sequence[T]. See the sketch below for the distinction.
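For illustration only (not part of the PR), this is how a type checker reads the three annotations:

from __future__ import annotations

from typing import Sequence

one_element: tuple[int] = (1,)           # exactly one int, e.g. (1,)
any_length: tuple[int, ...] = (1, 2, 3)  # a tuple of any length
either: Sequence[int] = [1, 2, 3]        # list, tuple, or any other sequence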

name: str | None = None,
):
import numpy

check_type(
9 changes: 7 additions & 2 deletions python/paddle/nn/initializer/bilinear.py
@@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import numpy as np

import paddle
from paddle import _C_ops, pir

from ...base import core, framework, unique_name
@@ -78,12 +81,14 @@ def __init__(self):
"""Constructor for BilinearInitializer."""
super().__init__()

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Bilinear initialization.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
32 changes: 23 additions & 9 deletions python/paddle/nn/initializer/kaiming.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

# TODO: define the initializers of Kaiming functions in neural network
import math

@@ -64,11 +66,11 @@ class MSRAInitializer(Initializer):

def __init__(
self,
uniform=True,
fan_in=None,
seed=0,
negative_slope=0,
nonlinearity='relu',
uniform: bool = True,
fan_in: float | None = None,
seed: int = 0,
negative_slope: float = 0,
nonlinearity: str = 'relu',
@SigureMo (Member), Jun 15, 2024: Should this just use Literal? See:

recommended_gain = {
    'sigmoid': 1,
    'linear': 1,
    'conv1d': 1,
    'conv2d': 1,
    'conv3d': 1,
    'conv1d_transpose': 1,
    'conv2d_transpose': 1,
    'conv3d_transpose': 1,
    'tanh': 5.0 / 3,
    'relu': math.sqrt(2.0),
    'leaky_relu': math.sqrt(2.0 / (1 + param**2)),
    'selu': 3.0 / 4,
}

Define a TypeAlias for this in initializer.py and import it here.
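A minimal sketch of that suggestion (the alias name _NonLinearity is illustrative, not from the PR):

from typing import Literal

# initializer.py (sketch): a Literal alias covering the keys of recommended_gain
_NonLinearity = Literal[
    'sigmoid', 'linear',
    'conv1d', 'conv2d', 'conv3d',
    'conv1d_transpose', 'conv2d_transpose', 'conv3d_transpose',
    'tanh', 'relu', 'leaky_relu', 'selu',
]


# kaiming.py (sketch): import the alias and use it in the signature
def example_init(nonlinearity: _NonLinearity = 'relu') -> None:
    # a type checker will now flag typos such as nonlinearity='rleu'
    ...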

):
"""Constructor for MSRAInitializer"""
assert uniform is not None
@@ -80,12 +82,14 @@ def __init__(
self._negative_slope = negative_slope
self._nonlinearity = nonlinearity

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: paddle.pir.Block | None = None
) -> paddle.Tensor | None:
Reviewer (Member): Please also unify this return annotation across the base class and the files that have already been annotated; a sketch of the shared signature follows below.
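A sketch of the shared signature (the return annotation matches the files already annotated in this PR):

from __future__ import annotations

import paddle


class Initializer:
    # Sketch: the base class and each annotated subclass would use the same
    # paddle.Tensor | None return annotation on forward().
    def forward(
        self, var: paddle.Tensor, block: paddle.pir.Block | None = None
    ) -> paddle.Tensor | None:
        raise NotImplementedError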

"""Initialize the input tensor with MSRA initialization.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
@@ -271,7 +275,12 @@ class KaimingNormal(MSRAInitializer):

"""

def __init__(self, fan_in=None, negative_slope=0.0, nonlinearity='relu'):
def __init__(
self,
fan_in: float | None = None,
negative_slope: float = 0.0,
nonlinearity: str = 'relu',
):
super().__init__(
uniform=False,
fan_in=fan_in,
@@ -317,7 +326,12 @@ class KaimingUniform(MSRAInitializer):

"""

def __init__(self, fan_in=None, negative_slope=0.0, nonlinearity='relu'):
def __init__(
self,
fan_in: float | None = None,
negative_slope: float = 0.0,
nonlinearity: str = 'relu',
):
super().__init__(
uniform=True,
fan_in=fan_in,
41 changes: 31 additions & 10 deletions python/paddle/nn/initializer/normal.py
@@ -12,6 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import paddle
from paddle import _C_ops, pir

from ...base import core, framework, unique_name
@@ -36,7 +39,7 @@ class NormalInitializer(Initializer):

"""

def __init__(self, loc=0.0, scale=1.0, seed=0):
def __init__(self, loc: float = 0.0, scale: float = 1.0, seed: int = 0):
assert loc is not None
assert scale is not None
assert seed is not None
@@ -45,12 +48,12 @@ def __init__(self, loc=0.0, scale=1.0, seed=0):
self._std_dev = scale
self._seed = seed

def forward(self, var, block=None):
def forward(self, var: paddle.Tensor, block: pir.Block | None = None):
"""Initialize the input tensor with Normal distribution.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
@@ -119,7 +122,7 @@ class Normal(NormalInitializer):
Args:
mean (float, optional): mean of the normal distribution. Default is 0.0.
std (float, optional): standard deviation of the normal distribution. Default is 1.0.
name(str, optional): The default value is None. Normally there is no need for user to set this
name(str|None, optional): The default value is None. Normally there is no need for user to set this
property. For more information, please refer to :ref:`api_guide_Name`. Default: None.

Returns:
@@ -156,7 +159,9 @@ class Normal(NormalInitializer):
[[ 1.0754838 -4.071067 ]]])
"""

def __init__(self, mean=0.0, std=1.0, name=None):
def __init__(
self, mean: float = 0.0, std: float = 1.0, name: str | None = None
):
assert mean is not None, 'mean should not be None'
assert std is not None, 'std should not be None'
super().__init__(loc=mean, scale=std, seed=0)
@@ -178,7 +183,14 @@ class TruncatedNormalInitializer(Initializer):

"""

def __init__(self, loc=0.0, scale=1.0, seed=0, a=-2.0, b=2.0):
def __init__(
self,
loc: float = 0.0,
scale: float = 1.0,
seed: int = 0,
a: float = -2.0,
b: float = 2.0,
):
assert loc is not None
assert scale is not None
assert seed is not None
@@ -191,12 +203,14 @@ def __init__(self, loc=0.0, scale=1.0, seed=0, a=-2.0, b=2.0):
self._a = a
self._b = b

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> None | paddle.Tensor:
"""Initialize the input tensor with TruncatedNormal distribution.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
@@ -289,7 +303,7 @@ class TruncatedNormal(TruncatedNormalInitializer):
std (float, optional): Standard deviation of the normal distribution. Default is :math:`1.0`.
a (float, optional): The minimum cutoff value. Default is -2.0.
b (float, optional): The maximum cutoff value. Default is 2.0.
name (str, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.
name (str|None, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.

Returns:
A parameter initialized by truncated normal distribution (Gaussian distribution).
@@ -325,7 +339,14 @@ class TruncatedNormal(TruncatedNormalInitializer):
[[-0.11380529 -3.0696259 ]]])
"""

def __init__(self, mean=0.0, std=1.0, a=-2.0, b=2.0, name=None):
def __init__(
self,
mean: float = 0.0,
std: float = 1.0,
a: float = -2.0,
b: float = 2.0,
name: str | None = None,
):
assert mean is not None, 'mean should not be None'
assert std is not None, 'std should not be None'
assert a is not None, 'a should not be None'
23 changes: 18 additions & 5 deletions python/paddle/nn/initializer/uniform.py
@@ -12,6 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import paddle
from paddle import _C_ops, pir

from ...base import core, framework, unique_name
@@ -43,7 +46,13 @@ class UniformInitializer(Initializer):
"""

def __init__(
self, low=-1.0, high=1.0, seed=0, diag_num=0, diag_step=0, diag_val=1.0
self,
low: float = -1.0,
high: float = 1.0,
seed: int = 0,
diag_num: int = 0,
diag_step: int = 0,
diag_val: float = 1.0,
):
assert low is not None
assert high is not None
@@ -62,12 +71,14 @@ def __init__(
self._diag_step = diag_step
self._diag_val = diag_val

def forward(self, var, block=None):
def forward(
self, var: paddle.Tensor, block: pir.Block | None = None
) -> paddle.Tensor | None:
"""Initialize the input tensor with Uniform distribution.

Args:
var(Tensor): Tensor that needs to be initialized.
block(Block, optional): The block in which initialization ops
block(Block|None, optional): The block in which initialization ops
should be added. Used in static graph only, default None.

Returns:
@@ -176,7 +187,7 @@ class Uniform(UniformInitializer):
Args:
low (float, optional): Lower boundary of the uniform distribution. Default is :math:`-1.0`.
high (float, optional): Upper boundary of the uniform distribution. Default is :math:`1.0`.
name (str, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.
name (str|None, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.

Returns:
A parameter initialized by uniform distribution.
@@ -213,7 +224,9 @@ class Uniform(UniformInitializer):
[[-0.41843393, 0.27575102]]])
"""

def __init__(self, low=-1.0, high=1.0, name=None):
def __init__(
self, low: float = -1.0, high: float = 1.0, name: str | None = None
):
assert low is not None, 'low should not be None'
assert high is not None, 'high should not be None'
assert high >= low, 'high should greater or equal than low'