From a415bac4f73cba38b84799b1664f57a321416fd7 Mon Sep 17 00:00:00 2001
From: Hazem
Date: Sat, 23 Dec 2023 00:06:59 +0200
Subject: [PATCH] Ran pre-commit

---
 python/mlx/nn/layers/dropout.py             |  2 --
 python/mlx/nn/layers/positional_encoding.py | 15 +++++++++++++--
 2 files changed, 13 insertions(+), 4 deletions(-)

diff --git a/python/mlx/nn/layers/dropout.py b/python/mlx/nn/layers/dropout.py
index e2cc981e2..14c5cb15e 100644
--- a/python/mlx/nn/layers/dropout.py
+++ b/python/mlx/nn/layers/dropout.py
@@ -88,5 +88,3 @@ class Dropout2d(Module):
         mask = mx.random.bernoulli(p=self._p_1, shape=mask_shape)
 
         return (1 / self._p_1) * mask * x
-
-
diff --git a/python/mlx/nn/layers/positional_encoding.py b/python/mlx/nn/layers/positional_encoding.py
index 38c91f29a..4852df546 100644
--- a/python/mlx/nn/layers/positional_encoding.py
+++ b/python/mlx/nn/layers/positional_encoding.py
@@ -25,7 +25,13 @@ class RoPE(Module):
         each dimension in the positional encodings. Default: ``10000``
     """
 
-    def __init__(self, dims: int, traditional: bool = False, base: float = 10000, scale: float = 1.0):
+    def __init__(
+        self,
+        dims: int,
+        traditional: bool = False,
+        base: float = 10000,
+        scale: float = 1.0,
+    ):
         super().__init__()
         self.dims = dims
         self.traditional = traditional
@@ -80,7 +86,12 @@ class RoPE(Module):
 
     @staticmethod
     def create_cos_sin_theta(
-        N: int, D: int, offset: int = 0, base: float = 10000, scale: float = 1.0, dtype=mx.float32
+        N: int,
+        D: int,
+        offset: int = 0,
+        base: float = 10000,
+        scale: float = 1.0,
+        dtype=mx.float32,
+    ):
         D = D // 2
         positions = mx.arange(offset, N, dtype=dtype) * scale
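
Note (not part of the patch): a minimal usage sketch confirming that the
pre-commit re-wrap leaves the RoPE signature itself unchanged. The
example values and the (batch, sequence, dims) input shape are
assumptions for illustration, not taken from the patch.

    import mlx.core as mx
    import mlx.nn as nn

    # Keyword defaults match the reformatted __init__ above.
    rope = nn.RoPE(dims=64, traditional=False, base=10000, scale=1.0)

    x = mx.random.normal((2, 100, 64))  # assumed (batch, sequence, dims)
    y = rope(x)                         # rotary-encoded, same shape as x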