diff --git a/python/mlx/optimizers.py b/python/mlx/optimizers.py
index d737d5d866..de77d9bea5 100644
--- a/python/mlx/optimizers.py
+++ b/python/mlx/optimizers.py
@@ -424,7 +424,9 @@ class Adamax(Adam):
     ):
         super().__init__(learning_rate, betas, eps)
         if not 0.0 <= eps:
-            raise ValueError(f"Epsilon value should be >=0, {self.eps} was provided instead")
+            raise ValueError(
+                f"Epsilon value should be >=0, {self.eps} was provided instead"
+            )
 
     def apply_single(
         self, gradient: mx.array, parameter: mx.array, state: OptimizerState