Pass use_dora parameter to linear_to_lora_layers (#885)

This commit is contained in:
Chime Ogbuji 2024-07-11 17:34:34 -04:00 committed by GitHub
parent fbe3247772
commit 8bf397e450
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -166,7 +166,7 @@ def train_model(
model.freeze()
# Convert linear layers to lora layers and unfreeze in the process
linear_to_lora_layers(model, args.lora_layers, args.lora_parameters)
linear_to_lora_layers(model, args.lora_layers, args.lora_parameters, args.use_dora)
# Resume training the given adapters.
if args.resume_adapter_file is not None: