From ad3cf5ed98ec599daefb0d6f69dc7371eb9459a8 Mon Sep 17 00:00:00 2001
From: Awni Hannun
Date: Fri, 8 Mar 2024 13:07:10 -0800
Subject: [PATCH] dropout 0 as default (#549)

---
 llms/mlx_lm/tuner/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llms/mlx_lm/tuner/lora.py b/llms/mlx_lm/tuner/lora.py
index adc1f8ca..d83b9025 100644
--- a/llms/mlx_lm/tuner/lora.py
+++ b/llms/mlx_lm/tuner/lora.py
@@ -10,7 +10,7 @@ class LoRALinear(nn.Module):
         linear: nn.Linear,
         r: int = 8,
         lora_alpha: float = 16,
-        lora_dropout: float = 0.05,
+        lora_dropout: float = 0.0,
         scale: float = 10.0,
     ):
         # TODO remove when input_dims and output_dims are attributes
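
For context, below is a minimal sketch of how a LoRA linear layer with this
signature typically applies lora_dropout. The class name and parameter list
follow the patch; the body is an illustrative assumption, not the repository's
exact implementation (see llms/mlx_lm/tuner/lora.py for the real code).

import math

import mlx.core as mx
import mlx.nn as nn


class LoRALinear(nn.Module):
    def __init__(
        self,
        linear: nn.Linear,
        r: int = 8,
        lora_alpha: float = 16,  # kept for signature parity; some variants fold it into scale
        lora_dropout: float = 0.0,  # the new default introduced by this patch
        scale: float = 10.0,
    ):
        super().__init__()
        self.linear = linear
        self.dropout = nn.Dropout(p=lora_dropout)
        self.scale = scale

        # Low-rank factors: A is small and random, B starts at zero, so the
        # wrapped layer's output is initially unchanged by the LoRA path.
        output_dims, input_dims = linear.weight.shape
        bound = 1 / math.sqrt(input_dims)
        self.lora_a = mx.random.uniform(low=-bound, high=bound, shape=(input_dims, r))
        self.lora_b = mx.zeros(shape=(r, output_dims))

    def __call__(self, x):
        y = self.linear(x)
        # Dropout touches only the low-rank path. With lora_dropout=0.0 it is
        # the identity, so fine-tuning applies no adapter dropout by default.
        z = (self.dropout(x) @ self.lora_a) @ self.lora_b
        return y + self.scale * z

The practical effect of the change: LoRA fine-tuning now applies no dropout to
the adapter path unless the caller opts in, e.g. passing lora_dropout=0.05
restores the previous default behavior.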