mlx-examples/llms/mlx_lm/tuner/utils.py
Commit 362e88a744 by Anchen, "feat: move lora into mlx-lm (#337)" (adds LoRA and QLoRA training to mlx-lm; co-authored by Awni Hannun <awni@apple.com>, 2024-01-23)

import mlx.core as mx
from mlx.utils import tree_unflatten

from .lora import LoRALinear


def apply_lora_layers(model, adapter_file: str):
    """Replace the adapted linear layers in ``model`` with LoRALinear modules
    and load the trained adapter weights from ``adapter_file``."""
    adapters = list(mx.load(adapter_file).items())
    linear_replacements = {}
    # Adapter keys look like "<module path>.lora_a" / "<module path>.lora_b";
    # stripping the suffixes yields the set of linear layers that were adapted.
    lora_layers = set(
        [name.replace(".lora_a", "").replace(".lora_b", "") for name, _ in adapters]
    )
    # Swap each adapted linear layer for a LoRALinear wrapping the original weights.
    for name, module in model.named_modules():
        if name in lora_layers:
            replacement_module = LoRALinear.from_linear(module)
            linear_replacements[name] = replacement_module

    model.update_modules(tree_unflatten(list(linear_replacements.items())))
    # Load the trained lora_a / lora_b parameters into the new modules.
    model.update(tree_unflatten(adapters))

    return model
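
A minimal usage sketch, not part of the file above: it assumes a model loaded elsewhere in mlx_lm (e.g. via mlx_lm.utils.load) and an "adapters.npz" file produced by the LoRA training loop in this package; the model path and file name are illustrative.

# Sketch: attach trained LoRA adapters to a loaded model (assumed helpers/paths).
from mlx_lm.utils import load
from mlx_lm.tuner.utils import apply_lora_layers

model, tokenizer = load("mlx-community/Mistral-7B-v0.1-hf-4bit-mlx")  # illustrative model path
model = apply_lora_layers(model, "adapters.npz")  # adapter file from LoRA training
model.eval()  # switch to inference mode after the adapters are applied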