From ab4bf05c6e72928ec0ca0143a3f976f6e787e40c Mon Sep 17 00:00:00 2001
From: hschaeufler <9865991+hschaeufler@users.noreply.github.com>
Date: Sat, 26 Oct 2024 19:34:46 +0300
Subject: [PATCH] Update lora_config.yaml with new param: num_layers (#1068)

---
 llms/mlx_lm/examples/lora_config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llms/mlx_lm/examples/lora_config.yaml b/llms/mlx_lm/examples/lora_config.yaml
index 4ec9a23c..530272c7 100644
--- a/llms/mlx_lm/examples/lora_config.yaml
+++ b/llms/mlx_lm/examples/lora_config.yaml
@@ -14,7 +14,7 @@ data: "/path/to/training/data"
 seed: 0
 
 # Number of layers to fine-tune
-lora_layers: 16
+num_layers: 16
 
 # Minibatch size.
 batch_size: 4