From 0f790c4c84f05d12ce477ccc286766f1e8ec7adc Mon Sep 17 00:00:00 2001
From: paNikitin <115797306+paNikitin@users.noreply.github.com>
Date: Sun, 23 Feb 2025 12:37:49 +0300
Subject: [PATCH] Update lora.py

---
 llms/mlx_lm/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llms/mlx_lm/lora.py b/llms/mlx_lm/lora.py
index 45119025..6edea28d 100644
--- a/llms/mlx_lm/lora.py
+++ b/llms/mlx_lm/lora.py
@@ -177,7 +177,7 @@ def train_model(
 ):
     model.freeze()
     if args.fine_tune_type == "full":
-        for l in model.layers[-min(args.num_layers, 0) :]:
+        for l in model.layers[-max(args.num_layers, 0) :]:
             l.unfreeze()
     elif args.fine_tune_type in ["lora", "dora"]:
         # Convert linear layers to lora/dora layers and unfreeze in the process
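
Note (not part of the patch): the one-character change from min to max matters because for any positive num_layers, -min(args.num_layers, 0) evaluates to -0, and a slice starting at -0 is the whole list, so full fine-tuning unfroze every layer regardless of the setting. Below is a minimal standalone sketch of the two slicing behaviors; the plain-string layers, the 32-layer count, and the num_layers values are illustrative assumptions, not taken from the patch.

# Minimal sketch: strings stand in for model.layers entries.
layers = [f"layer_{i}" for i in range(32)]

def unfrozen(num_layers, clamp):
    # Mirrors the slice in train_model: model.layers[-clamp(num_layers, 0):]
    return layers[-clamp(num_layers, 0):]

# Buggy: -min(16, 0) == -0, and layers[-0:] is layers[0:], so ALL layers
# are selected no matter what num_layers requests.
assert unfrozen(16, min) == layers

# Fixed: -max(16, 0) == -16, so only the last 16 layers are selected.
assert unfrozen(16, max) == layers[-16:]

# num_layers = -1 (commonly used to mean "all layers") still behaves:
# max(-1, 0) == 0 selects every layer, whereas min(-1, 0) == -1 would
# silently skip the first layer (layers[1:]).
assert unfrozen(-1, max) == layers
assert unfrozen(-1, min) == layers[1:]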