From 0a49ba0697e9667a1dd50b8792672a86418b7262 Mon Sep 17 00:00:00 2001
From: Anchen
Date: Thu, 1 Feb 2024 06:51:26 +1100
Subject: [PATCH] fix(mlx-lm): apply_lora_layers doesn't update the LoRA weights
(#396)
---
llms/mlx_lm/tuner/utils.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/llms/mlx_lm/tuner/utils.py b/llms/mlx_lm/tuner/utils.py
index dbcf1acf..74f677fb 100644
--- a/llms/mlx_lm/tuner/utils.py
+++ b/llms/mlx_lm/tuner/utils.py
@@ -33,6 +33,9 @@ def apply_lora_layers(model: nn.Module, adapter_file: str) -> nn.Module:
linear_replacements.append((name, replacement_module))
model.update_modules(tree_unflatten(linear_replacements))
+
+ model.update(tree_unflatten(adapters))
+
return model