Mirror of https://github.com/ml-explore/mlx.git (synced 2025-10-30 23:38:09 +08:00)
	Add tanh activation function (#115)
* added Adagrad optimizer
* added Tanh activation function
* reformatted file
* remove unrelated stuff
* Update activations.py
@@ -129,3 +129,16 @@ class GELU(Module):
 
     def __call__(self, x):
         return self._act(x)
+
+
+def tanh(x):
+    """Applies the hyperbolic tangent function.
+
+    Simply ``mx.tanh(x)``.
+    """
+    return mx.tanh(x)
+
+
+@_make_activation_module(tanh)
+class Tanh(Module):
+    pass
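For context, a minimal usage sketch of the new activation. This assumes an MLX install where the function and module are re-exported at the package level as nn.tanh and nn.Tanh, as other activations in this file are; both forms reduce to mx.tanh(x).

import mlx.core as mx
import mlx.nn as nn

x = mx.array([-2.0, 0.0, 2.0])

# Functional form: a thin wrapper around mx.tanh
y1 = nn.tanh(x)

# Module form: Tanh is generated by the _make_activation_module decorator,
# so calling the module instance dispatches to the same tanh function
y2 = nn.Tanh()(x)

print(y1)  # array([-0.964028, 0, 0.964028], dtype=float32)
print(y2)  # same values as y1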
Author: __mo_san__