Update lora_config.yaml with new param: num_layers

This commit is contained in:
hschaeufler 2024-10-23 22:20:35 +03:00 committed by GitHub
parent 9000e280ae
commit 9284e50c67
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -14,7 +14,7 @@ data: "/path/to/training/data"
 seed: 0
 # Number of layers to fine-tune
-lora_layers: 16
+num_layers: 16
 # Minibatch size.
 batch_size: 4