Mirror of https://github.com/ml-explore/mlx.git, synced 2025-07-20 08:01:12 +08:00
Add tanh activation function (#115)
* added Adagrad optimizer
* added Tanh activation function
* reformatted file
* remove unrelated stuff
* Update activations.py
parent 0b28399638
commit ef7b8756c0
@@ -129,3 +129,16 @@ class GELU(Module):

     def __call__(self, x):
         return self._act(x)
+
+
+def tanh(x):
+    """Applies the hyperbolic tangent function.
+
+    Simply ``mx.tanh(x)``.
+    """
+    return mx.tanh(x)
+
+
+@_make_activation_module(tanh)
+class Tanh(Module):
+    pass
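A minimal usage sketch (not part of the commit), assuming the usual mlx import aliases and that tanh and Tanh are re-exported from mlx.nn like the other activations:

    import mlx.core as mx
    import mlx.nn as nn

    x = mx.array([-2.0, 0.0, 2.0])

    # Functional form: a thin wrapper around mx.tanh
    y_fn = nn.tanh(x)

    # Module form generated by the @_make_activation_module(tanh) decorator
    act = nn.Tanh()
    y_mod = act(x)

    # Both forms compute the same element-wise hyperbolic tangent,
    # roughly [-0.964, 0.0, 0.964] for this input
    print(y_fn)
    print(y_mod)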