diff --git a/python/paddle/sparse/nn/layer/activation.py b/python/paddle/sparse/nn/layer/activation.py
index ec01db4fc0dc7c..af0562801c6bbd 100644
--- a/python/paddle/sparse/nn/layer/activation.py
+++ b/python/paddle/sparse/nn/layer/activation.py
@@ -12,10 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from paddle.nn import Layer
 
 from .. import functional as F
 
+if TYPE_CHECKING:
+    from paddle import Tensor
+
 __all__ = []
 
 
@@ -29,7 +36,7 @@ class ReLU(Layer):
         ReLU(x) = max(x, 0)
 
     Parameters:
-        name (str, optional): Name for the operation (optional, default is None).
+        name (str|None, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
     Shape:
@@ -51,14 +58,14 @@ class ReLU(Layer):
                    values=[0., 1.])
     """
 
-    def __init__(self, name=None):
+    def __init__(self, name: str | None = None) -> None:
         super().__init__()
         self._name = name
 
-    def forward(self, x):
+    def forward(self, x: Tensor) -> Tensor:
         return F.relu(x, self._name)
 
-    def extra_repr(self):
+    def extra_repr(self) -> str:
         name_str = f'name={self._name}' if self._name else ''
         return name_str
 
@@ -81,7 +88,7 @@ class Softmax(Layer):
 
     Parameters:
         axis (int, optional): The axis along which to perform softmax calculations. Only support -1 for SparseCsrTensor.
-        name (str, optional): Name for the operation (optional, default is None).
+        name (str|None, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
     Shape:
@@ -95,7 +102,7 @@ class Softmax(Layer):
 
             >>> paddle.seed(2022)
             >>> mask = paddle.rand((3, 4)) < 0.7
-            >>> x = paddle.rand((3, 4)) * mask
+            >>> x = paddle.rand((3, 4)) * mask.astype('float32')
            >>> print(x)
             Tensor(shape=[3, 4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [[0.88156885, 0.14463395, 0.17831714, 0.43818203],
@@ -136,15 +143,15 @@ class Softmax(Layer):
                     0.42031071, 0.36666498, 0.59738696, 0.40261301])
     """
 
-    def __init__(self, axis=-1, name=None):
+    def __init__(self, axis: int = -1, name: str | None = None) -> None:
         super().__init__()
         self._axis = axis
         self._name = name
 
-    def forward(self, x):
+    def forward(self, x: Tensor) -> Tensor:
         return F.softmax(x, self._axis, self._name)
 
-    def extra_repr(self):
+    def extra_repr(self) -> str:
         name_str = f'name={self._name}' if self._name else ''
         return name_str
 
@@ -159,7 +166,7 @@ class ReLU6(Layer):
         ReLU6(x) = min(max(0,x), 6)
 
     Parameters:
-        name (str, optional): Name for the operation (optional, default is None).
+        name (str|None, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
     Shape:
@@ -177,14 +184,14 @@ class ReLU6(Layer):
             >>> out = relu6(sparse_x)
     """
 
-    def __init__(self, name=None):
+    def __init__(self, name: str | None = None) -> None:
         super().__init__()
         self._name = name
 
-    def forward(self, x):
+    def forward(self, x: Tensor) -> Tensor:
         return F.relu6(x, self._name)
 
-    def extra_repr(self):
+    def extra_repr(self) -> str:
         name_str = f'name={self._name}' if self._name else ''
         return name_str
 
@@ -207,7 +214,7 @@ class LeakyReLU(Layer):
     Parameters:
         negative_slope (float, optional): Slope of the activation function at
             :math:`x < 0` . Default is 0.01.
-        name (str, optional): Name for the operation (optional, default is None).
+        name (str|None, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
     Shape:
@@ -226,14 +233,16 @@ class LeakyReLU(Layer):
 
     """
 
-    def __init__(self, negative_slope=0.01, name=None):
+    def __init__(
+        self, negative_slope: float = 0.01, name: str | None = None
+    ) -> None:
         super().__init__()
         self._negative_slope = negative_slope
         self._name = name
 
-    def forward(self, x):
+    def forward(self, x: Tensor) -> Tensor:
         return F.leaky_relu(x, self._negative_slope, self._name)
 
-    def extra_repr(self):
+    def extra_repr(self) -> str:
         name_str = f'name={self._name}' if self._name else ''
         return name_str
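
For reviewers, a minimal sketch of how the annotated layers are exercised; the tensor values are illustrative and this snippet is not part of the diff. It assumes a Paddle build with sparse tensor support:

```python
# Usage sketch for the newly annotated sparse activation layers.
import paddle
from paddle.sparse import nn

dense_x = paddle.to_tensor([-2.0, 0.0, 1.0])
sparse_x = dense_x.to_sparse_coo(1)  # convert with 1 sparse dim

relu = nn.ReLU()                     # __init__(name: str | None = None)
out = relu(sparse_x)                 # dispatches to forward(x: Tensor) -> Tensor

leaky = nn.LeakyReLU(negative_slope=0.1, name='leaky')
print(leaky.extra_repr())            # extra_repr() -> str; prints "name=leaky"
```

Since `Tensor` is only needed for the annotations, importing it under `TYPE_CHECKING` (with `from __future__ import annotations` making the hints lazily evaluated strings) keeps the import out of the runtime path and avoids any circular-import risk.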