Remove useless pass (#364)

Co-authored-by: Chunyang Wen <chunyang_wen@apple.com>

Author: Chunyang Wen
Date: 2024-01-04 22:34:01 +08:00
Committed by: GitHub
Parent: d752f8e142
Commit: 16856a0160


@@ -169,7 +169,6 @@ class Sigmoid(Module):
     .. math::
         \text{Sigmoid}(x) = \sigma(x) = \frac{1}{1 + \exp(-x)}
     """
-    pass
 def step(x: mx.array, threshold: float = 0.0):
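Each removed `pass` was dead code: in Python, a docstring alone is a complete class body, so a statement after it adds nothing. As a quick check of the formula in this hunk, a minimal sketch assuming only `mlx.core` (the `Demo` class is illustrative, not from the diff):

import mlx.core as mx

# A docstring is a complete class body on its own; the trailing
# `pass` removed in this diff was redundant.
class Demo:
    """Docstring only; no `pass` required."""

# Sigmoid per the docstring's formula: 1 / (1 + exp(-x))
x = mx.array([-1.0, 0.0, 1.0])
print(1 / (1 + mx.exp(-x)))  # matches mx.sigmoid(x)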
@@ -251,7 +250,6 @@ class Mish(Module):
         \text{Mish}(x) = x * \text{Tanh}(\text{Softplus}(x))
     """
-    pass
 @_make_activation_module(relu)
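The Mish hunk documents Mish(x) = x * Tanh(Softplus(x)). A functional sketch of that formula follows; using `mx.logaddexp` for a numerically stable softplus is this sketch's choice, not necessarily how MLX implements it:

import mlx.core as mx

def mish(x: mx.array) -> mx.array:
    # Mish(x) = x * tanh(softplus(x)), where softplus(x) = log(1 + exp(x)).
    # logaddexp(0, x) computes softplus without overflow for large x.
    return x * mx.tanh(mx.logaddexp(0.0, x))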
@@ -261,7 +259,6 @@ class ReLU(Module):
     See :func:`relu`, for the functional equivalent.
     """
-    pass
 class LeakyReLU(Module):
@@ -305,7 +302,6 @@ class ReLU6(Module):
     See :func:`relu6`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(softmax)
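The `@_make_activation_module(...)` lines explain why these class bodies can be docstring-only: the decorator injects the `__call__` that delegates to the functional form. Its definition is not part of this diff; the sketch below is one way such a decorator could look, consistent with how it is used here:

def _make_activation_module(f):
    # Hypothetical sketch: attach a __call__ that delegates to the
    # functional activation `f`, so the decorated Module subclass
    # needs nothing in its body beyond a docstring.
    def decorator(klass):
        def __call__(self, x):
            return f(x)

        klass.__call__ = __call__
        return klass

    return decorator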
@@ -314,7 +310,6 @@ class Softmax(Module):
     See :func:`softmax`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(softplus)
@@ -323,7 +318,6 @@ class Softplus(Module):
     See :func:`softplus`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(softsign)
@@ -332,7 +326,6 @@ class Softsign(Module):
     See :func:`softsign`, for the functional equivalent.
     """
-    pass
 class CELU(Module):
@@ -360,7 +353,6 @@ class SiLU(Module):
     See :func:`silu`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(log_softmax)
@@ -369,7 +361,6 @@ class LogSoftmax(Module):
     See :func:`log_softmax`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(log_sigmoid)
@@ -378,7 +369,6 @@ class LogSigmoid(Module):
     See :func:`log_sigmoid`, for the functional equivalent.
     """
-    pass
 class PReLU(Module):
@@ -456,7 +446,6 @@ class Tanh(Module):
     See :func:`tanh`, for the functional equivalent.
     """
-    pass
 @_make_activation_module(hardswish)
@@ -465,7 +454,6 @@ class Hardswish(Module):
     See :func:`hardswish`, for the functional equivalent.
     """
-    pass
 class Step(Module):
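`class Step(Module)` wraps the `step` function whose signature appears in the first hunk (`step(x, threshold=0.0)`). A sketch of the binary-step semantics; the exact comparison MLX uses (strict vs. non-strict) is an assumption here:

import mlx.core as mx

def step(x: mx.array, threshold: float = 0.0) -> mx.array:
    # Binary step: 0 below the threshold, 1 otherwise.
    return mx.where(x > threshold, 1, 0)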
@@ -498,4 +486,3 @@ class SELU(Module):
     See :func:`selu`, for the functional equivalent.
     """
-    pass
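Every class touched here advertises a functional equivalent ("See :func:`...`"), so the module and function forms are interchangeable on data. A minimal usage sketch with `mlx.nn`:

import mlx.core as mx
import mlx.nn as nn

x = mx.array([-2.0, 0.0, 2.0])
print(nn.ReLU()(x))  # module form
print(nn.relu(x))    # functional form, same result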