Add max sequence length argument in lora.py (#408)

A new argument "--max_seq_length" has been added to the command-line parser and is passed as a parameter to the main function of the lora.py script. This allows users to control the maximum sequence length during training.
This commit is contained in:
Ivan Fioravanti 2024-02-04 21:28:21 +01:00 committed by GitHub
parent 9d0dd34403
commit 7fbca214b1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -109,6 +109,12 @@ def build_parser():
default=500,
help="Number of test set batches, -1 uses the entire test set.",
)
parser.add_argument(
"--max_seq_length",
type=int,
default=2048,
help="Maximum sequence length.",
)
parser.add_argument("--seed", type=int, default=0, help="The PRNG seed")
return parser
@@ -197,6 +203,7 @@ if __name__ == "__main__":
steps_per_eval=args.steps_per_eval,
steps_per_save=args.save_every,
adapter_file=args.adapter_file,
max_seq_length=args.max_seq_length
)
if args.train:
print("Training")