Add tanh activation function (#115)

* added Adagrad optimizer ...

* added Tanh activation function ...

* reformatted file ...

* remove unrelated stuff ...

* Update activations.py
This commit is contained in:
__mo_san__ 2023-12-10 04:25:38 +01:00 committed by GitHub
parent 0b28399638
commit ef7b8756c0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -129,3 +129,16 @@ class GELU(Module):
def __call__(self, x):
    """Apply this module's activation to ``x``.

    ``self._act`` is installed by the module-construction decorator
    (``_make_activation_module`` — defined elsewhere in this file), so the
    actual function applied depends on which activation this class wraps.
    """
    return self._act(x)
def tanh(x):
    """Element-wise hyperbolic tangent of the input array.

    A thin functional wrapper around ``mx.tanh(x)``, provided so the
    activation can be used both directly and via the :class:`Tanh` module.
    """
    result = mx.tanh(x)
    return result
@_make_activation_module(tanh)
class Tanh(Module):
    """Module form of the hyperbolic tangent activation.

    The decorator wires this class to call :func:`tanh` on its input;
    the body is intentionally empty.
    """

    pass