From ef7b8756c03b6a958bd0341ed084304196ce47c8 Mon Sep 17 00:00:00 2001
From: __mo_san__ <50895527+m0saan@users.noreply.github.com>
Date: Sun, 10 Dec 2023 04:25:38 +0100
Subject: [PATCH] Add tanh activation function (#115)

* added Adagrad optimizer ...

* added Tanh activation function ...

* reformatted file ...

* remove unrelated stuff ...

* Update activations.py
---
 python/mlx/nn/layers/activations.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/python/mlx/nn/layers/activations.py b/python/mlx/nn/layers/activations.py
index cb1460ef3..2b5ded696 100644
--- a/python/mlx/nn/layers/activations.py
+++ b/python/mlx/nn/layers/activations.py
@@ -129,3 +129,16 @@ class GELU(Module):
 
     def __call__(self, x):
         return self._act(x)
+
+
+def tanh(x):
+    """Applies the hyperbolic tangent function.
+
+    Simply ``mx.tanh(x)``.
+    """
+    return mx.tanh(x)
+
+
+@_make_activation_module(tanh)
+class Tanh(Module):
+    pass
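
A minimal usage sketch of the patch above (not part of the diff), assuming `tanh` and `Tanh` are re-exported from `mlx.nn` the same way as the existing activations:

import mlx.core as mx
import mlx.nn as nn

x = mx.array([-2.0, 0.0, 2.0])

# Functional form: nn.tanh(x) is just mx.tanh(x) under the hood.
y = nn.tanh(x)

# Module form: Tanh is generated by the _make_activation_module
# decorator, so the instance applies the same function when called.
act = nn.Tanh()
y2 = act(x)

print(y)  # approximately [-0.9640, 0.0, 0.9640]

The module form exists so the activation can be composed inside `nn.Sequential` and other `Module` containers, while the functional form stays available for direct calls.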