diff --git a/llms/mlx_lm/examples/lora_config.yaml b/llms/mlx_lm/examples/lora_config.yaml
index 90bdd6ad..ae79fb65 100644
--- a/llms/mlx_lm/examples/lora_config.yaml
+++ b/llms/mlx_lm/examples/lora_config.yaml
@@ -34,7 +34,7 @@ steps_per_eval: 200
 resume_adapter_file: null
 
 # Save/load path for the trained adapter weights.
-adapter_file: "adapters.npz"
+adapter_path: "adapters"
 
 # Save the model every N iterations.
 save_every: 100