Fix max_tokens (#1148)

Author: Alex Barron (committed via GitHub)
Date: 2024-12-10 11:26:04 -08:00
Parent: 12083c4b7e
Commit: 135c5818c1


@@ -79,7 +79,7 @@ def main():
         model,
         tokenizer,
         prompt,
-        args.max_tokens,
+        max_tokens=args.max_tokens,
         sampler=make_sampler(args.temp, args.top_p),
         prompt_cache=prompt_cache,
     ):
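
The hunk changes args.max_tokens from a positional to a keyword argument in the streaming call. For context, here is a minimal sketch of what the surrounding call could look like after the fix, assuming it targets mlx_lm's stream_generate; the model path, prompt, sampling values, and the response.text field are illustrative assumptions, not taken from the commit.

# Sketch only: names outside the diff hunk are assumptions.
from mlx_lm import load, stream_generate
from mlx_lm.models.cache import make_prompt_cache
from mlx_lm.sample_utils import make_sampler

model, tokenizer = load("mlx-community/Mistral-7B-Instruct-v0.3-4bit")
prompt = "Write a haiku about the ocean."
prompt_cache = make_prompt_cache(model)

for response in stream_generate(
    model,
    tokenizer,
    prompt,
    max_tokens=256,  # passed by keyword, as in the fix
    sampler=make_sampler(0.7, 0.9),  # (temp, top_p)
    prompt_cache=prompt_cache,
):
    # Assumes this mlx_lm version yields response objects with a .text field.
    print(response.text, end="", flush=True)
print()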