Activations LeakyReLU / PReLU / Softplus / Mish (#109)

* leaky_relu / prelu / softplus / mish

* added tests

* updated bench

* remove torch refs, add init to PReLU

* added arXiv reference to mish

* added missing docs
Authored by Diogo on 2023-12-11 22:40:57 -05:00, committed by GitHub
parent f5df47ec6e
commit 02de234ef0
8 changed files with 133 additions and 31 deletions
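For orientation before the per-file hunks, a minimal usage sketch of the activations this commit adds (illustrative only, not part of the diff; assumes an MLX build that includes this change):

import mlx.core as mx
import mlx.nn as nn

x = mx.array([1.0, -1.0, 0.0, 0.5])
nn.Mish()(x)                    # module form
nn.mish(x)                      # functional form
nn.PReLU()(x)                   # single learnable slope, initialized to 0.25
nn.prelu(x, mx.array([0.25]))   # functional form with an explicit alpha array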


@@ -7,6 +7,8 @@ from mlx.nn.layers.activations import (
    SELU,
    LeakyReLU,
    LogSigmoid,
    Mish,
    PReLU,
    ReLU,
    ReLU6,
    SiLU,
@@ -19,6 +21,8 @@ from mlx.nn.layers.activations import (
    gelu_fast_approx,
    leaky_relu,
    log_sigmoid,
    mish,
    prelu,
    relu,
    relu6,
    selu,


@@ -176,6 +176,33 @@ def selu(x):
    See also :func:`elu`.
    """
    return elu(x, 1.67326) * 1.0507


def prelu(x: mx.array, alpha: mx.array) -> mx.array:
    r"""Applies the element-wise function:

    .. math::
        \text{PReLU}(x) = \max(0,x) + a * \min(0,x)

    Here :math:`a` is an array.
    """
    return mx.maximum(0, x) + alpha * mx.minimum(0, x)
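A quick illustrative check of the functional form (not part of the diff; assumes a one-element alpha that broadcasts against x):

import mlx.core as mx
import mlx.nn as nn

# Negatives are scaled by alpha, positives pass through unchanged.
nn.prelu(mx.array([-2.0, 3.0]), mx.array([0.25]))   # -> array([-0.5, 3.0])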
def mish(x: mx.array) -> mx.array:
    r"""Applies the Mish function, element-wise.

    Mish: A Self Regularized Non-Monotonic Neural Activation Function.

    Reference: https://arxiv.org/abs/1908.08681

    .. math::
        \text{Mish}(x) = x * \text{Tanh}(\text{Softplus}(x))
    """
    return x * mx.tanh(softplus(x))


@_make_activation_module(mish)
class Mish(Module):
    pass
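For intuition, an illustrative numeric check (not part of the diff): mish(1) = 1 * tanh(softplus(1)) = tanh(log(1 + e)) ≈ tanh(1.3133) ≈ 0.8651, which is the value asserted in test_mish below.

import mlx.core as mx
import mlx.nn as nn

nn.mish(mx.array([1.0, -1.0, 0.0, 0.5]))   # ≈ [0.8651, -0.3034, 0.0, 0.3752]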
@_make_activation_module(relu)
@@ -257,6 +284,15 @@ class LogSigmoid(Module):
    pass


class PReLU(Module):
    def __init__(self, num_parameters=1, init=0.25):
        super().__init__()
        self.weight = mx.full([num_parameters], init)

    def __call__(self, x: mx.array):
        return prelu(x, self.weight)
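A short sketch of the module form (illustrative, not part of the diff): `weight` is a plain array attribute of `num_parameters` values, so it is picked up as a module parameter and can be trained.

import mlx.core as mx
import mlx.nn as nn

layer = nn.PReLU(num_parameters=1, init=0.25)
layer(mx.array([-2.0, 3.0]))   # -> array([-0.5, 3.0])
layer.parameters()             # roughly {'weight': array([0.25])}, i.e. the slope is learnable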
class GELU(Module):
r"""Applies the Gaussian Error Linear Units.


@@ -2,8 +2,10 @@
import os
import unittest
from typing import Callable, List, Tuple

import mlx.core as mx
import numpy as np


class MLXTestCase(unittest.TestCase):
@@ -16,3 +18,16 @@ class MLXTestCase(unittest.TestCase):
    def tearDown(self):
        mx.set_default_device(self.default)

    def assertEqualArray(
        self,
        args: List[mx.array | float | int],
        mlx_func: Callable[..., mx.array],
        expected: mx.array,
        atol=1e-2,
        rtol=1e-2,
    ):
        mx_res = mlx_func(*args)
        assert tuple(mx_res.shape) == tuple(expected.shape), "shape mismatch"
        assert mx_res.dtype == expected.dtype, "dtype mismatch"
        np.testing.assert_allclose(mx_res, expected, rtol=rtol, atol=atol)
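For context, a hypothetical test (names chosen here for illustration) showing how the new helper is called from a MLXTestCase subclass; it forwards the args to the callable, then checks shape, dtype, and values against the expected array:

import mlx.core as mx
import mlx.nn as nn
import mlx_tests


class TestExample(mlx_tests.MLXTestCase):
    def test_relu(self):
        self.assertEqualArray(
            [mx.array([-2.0, 3.0])],   # positional args forwarded to the callable
            nn.ReLU(),                 # callable under test
            mx.array([0.0, 3.0]),      # expected output
        )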


@@ -449,31 +449,19 @@ class TestNN(mlx_tests.MLXTestCase):
        self.assertEqual(y.shape, [3])
        self.assertEqual(y.dtype, mx.float32)

    def test_step_activation(self):
        x = mx.arange(-3, 4)
        expected = mx.array([0, 0, 0, 0, 0, 1, 1])

        y = nn.Step()(x)
        self.assertTrue(mx.array_equal(y, expected))

        y = nn.Step(2)(x)
        expected = mx.array([0, 0, 0, 0, 0, 0, 1])
        self.assertTrue(mx.array_equal(y, expected))
    def test_selu(self):
        x = mx.arange(-3, 4)
        expected = mx.array(
            [
                -1.670563817024231,
                -1.5201621055603027,
                -1.1113275289535522,
                0.0,
                1.0506999492645264,
                2.1013998985290527,
                3.152099847793579,
            ]
        )
        y = nn.SELU()(x)
        self.assertTrue(mx.allclose(y, expected))

    def test_prelu(self):
        self.assertEqualArray(
            [mx.array([1.0, -1.0, 0.0, 0.5])],
            nn.PReLU(),
            mx.array([1.0, -0.25, 0.0, 0.5]),
        )

    def test_mish(self):
        self.assertEqualArray(
            [mx.array([1.0, -1.0, 0.0, 0.5])],
            nn.Mish(),
            mx.array([0.8651, -0.3034, 0.0000, 0.3752]),
        )
if __name__ == "__main__":