From d52383367a52304509eb046244a2a2505f1193f1 Mon Sep 17 00:00:00 2001
From: Awni Hannun
Date: Sat, 20 Jan 2024 10:33:46 -0800
Subject: [PATCH] format (#510)

---
 python/mlx/optimizers.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/mlx/optimizers.py b/python/mlx/optimizers.py
index d737d5d866..de77d9bea5 100644
--- a/python/mlx/optimizers.py
+++ b/python/mlx/optimizers.py
@@ -424,7 +424,9 @@ class Adamax(Adam):
     ):
         super().__init__(learning_rate, betas, eps)
         if not 0.0 <= eps:
-            raise ValueError(f"Epsilon value should be >=0, {self.eps} was provided instead")
+            raise ValueError(
+                f"Epsilon value should be >=0, {self.eps} was provided instead"
+            )
 
     def apply_single(
         self, gradient: mx.array, parameter: mx.array, state: OptimizerState
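
Note: the patch only rewraps the long `raise ValueError(...)` onto multiple lines; the eps validation in `Adamax.__init__` behaves the same before and after. Below is a minimal sketch of that behavior, assuming the `mlx` package is installed and `mlx.optimizers.Adamax` keeps the signature shown in the hunk above (learning_rate, betas, eps):

    # Sketch only: illustrates the eps check touched by this patch.
    import mlx.optimizers as optim

    # A non-negative eps constructs normally.
    opt = optim.Adamax(learning_rate=1e-3, betas=[0.9, 0.999], eps=1e-8)

    # A negative eps trips the (now multi-line) ValueError.
    try:
        optim.Adamax(learning_rate=1e-3, eps=-1e-8)
    except ValueError as exc:
        print(exc)  # "Epsilon value should be >=0, -1e-08 was provided instead"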