fix(mlx-lm): apply lora layer doesn't update the lora weights (#396)

Author: Anchen (committed via GitHub)
Date: 2024-02-01 06:51:26 +11:00
Parent: ab8bde1590
Commit: 0a49ba0697


@@ -33,6 +33,9 @@ def apply_lora_layers(model: nn.Module, adapter_file: str) -> nn.Module:
             linear_replacements.append((name, replacement_module))
     model.update_modules(tree_unflatten(linear_replacements))
+    model.update(tree_unflatten(adapters))
     return model
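
For context, here is a minimal sketch of what the patched apply_lora_layers could look like as a whole, assuming mlx-lm's usual helpers (mx.load, tree_unflatten, and a LoRALinear wrapper with a from_linear constructor). Everything outside the lines shown in the diff above is reconstructed for illustration, including the import path for LoRALinear, and is not quoted from the repository.

import mlx.core as mx
import mlx.nn as nn
from mlx.utils import tree_unflatten

# Hypothetical import path, for illustration only; the real wrapper
# lives in mlx-lm's tuner code.
from mlx_lm.tuner.lora import LoRALinear


def apply_lora_layers(model: nn.Module, adapter_file: str) -> nn.Module:
    # Load the trained adapter weights as (name, array) pairs; keys are
    # dotted module paths such as "layers.0.attention.q_proj.lora_a".
    adapters = list(mx.load(adapter_file).items())

    # Module paths that carry LoRA weights.
    lora_layers = set(name.split(".lora")[0] for name, _ in adapters)

    # Swap each targeted linear layer for a LoRA-wrapped replacement.
    linear_replacements = []
    for name, module in model.named_modules():
        if name in lora_layers:
            linear_replacements.append((name, LoRALinear.from_linear(module)))
    model.update_modules(tree_unflatten(linear_replacements))

    # The fix from this commit: from_linear creates freshly initialized
    # LoRA matrices, so the trained adapter weights must be loaded on top
    # of the newly installed modules.
    model.update(tree_unflatten(adapters))

    return model

Without that final model.update call, the model kept the replacement layers' fresh initialization, which is presumably the bug the commit title describes: the LoRA weights from the adapter file were never applied.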