Update lora_config.yaml with new param: num_layers (#1068)

This commit is contained in:
hschaeufler 2024-10-26 19:34:46 +03:00 committed by GitHub
parent 4971462bf0
commit ab4bf05c6e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -14,7 +14,7 @@ data: "/path/to/training/data"
 seed: 0
 # Number of layers to fine-tune
-lora_layers: 16
+num_layers: 16
 # Minibatch size.
 batch_size: 4