From c2498164889cb5eec311993fcc1415dfb6a3a7e3 Mon Sep 17 00:00:00 2001
From: Josh Soref <2119212+jsoref@users.noreply.github.com>
Date: Mon, 1 Jan 2024 22:30:59 -0500
Subject: [PATCH] spelling: default

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
---
 mlx/random.h             | 2 +-
 python/mlx/optimizers.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/mlx/random.h b/mlx/random.h
index e684464bc..360bdbdb1 100644
--- a/mlx/random.h
+++ b/mlx/random.h
@@ -16,7 +16,7 @@ class KeySequence {
   void seed(uint64_t seed);
   array next();
 
-  // static defualt
+  // static default
   static KeySequence& default_() {
     static KeySequence ks(0);
     return ks;
diff --git a/python/mlx/optimizers.py b/python/mlx/optimizers.py
index 17a16c459..601d87b03 100644
--- a/python/mlx/optimizers.py
+++ b/python/mlx/optimizers.py
@@ -253,7 +253,7 @@ class AdaDelta(Optimizer):
         rho (float, optional): The coefficient :math:`\rho` used for computing a running
             average of squared gradients. Default: ``0.9``
         eps (float, optional): The term :math:`\epsilon` added to the denominator to improve
-            numerical stability. Ddefault: `1e-8`
+            numerical stability. Default: `1e-8`
     """
 
     def __init__(self, learning_rate: float, rho: float = 0.9, eps: float = 1e-6):