diff --git a/.gitignore b/.gitignore
index 450fc5943..c382646a2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -74,3 +74,6 @@ build/
 # VSCode
 .vscode/
 .DS_Store
+
+# JetBrains
+.cache
diff --git a/python/mlx/nn/layers/activations.py b/python/mlx/nn/layers/activations.py
index 2b5ded696..75e9aac60 100644
--- a/python/mlx/nn/layers/activations.py
+++ b/python/mlx/nn/layers/activations.py
@@ -15,6 +15,16 @@ def _make_activation_module(f):
     return decorator
 
 
+def sigmoid(x):
+    r"""Applies the element-wise function:
+
+    .. math::
+        \text{Sigmoid}(x) = \sigma(x) = \frac{1}{1 + \exp(-x)}
+    """
+    return mx.sigmoid(x)
+
+
+
 def relu(x):
     """Applies the Rectified Linear Unit.
 
@@ -79,6 +89,9 @@ def gelu_fast_approx(x):
     """
     return x * mx.sigmoid(1.773 * x)
 
+@_make_activation_module(sigmoid)
+class Sigmoid(Module):
+    pass
 
 @_make_activation_module(relu)
 class ReLU(Module):