Fix max_tokens (#1148)

Alex Barron 2024-12-10 11:26:04 -08:00 committed by Billel Mokeddem
parent 64781fddf1
commit b83a73049d


@@ -79,7 +79,7 @@ def main():
         model,
         tokenizer,
         prompt,
-        args.max_tokens,
+        max_tokens=args.max_tokens,
         sampler=make_sampler(args.temp, args.top_p),
         prompt_cache=prompt_cache,
     ):
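
For context, passing max_tokens by keyword guards the caller against changes in the callee's parameter order. A minimal sketch of the failure mode, using hypothetical signatures rather than mlx_lm's actual API:

def generate_v1(model, tokenizer, prompt, max_tokens=256):
    # Old order: max_tokens is the fourth positional parameter.
    return f"up to {max_tokens} tokens"

def generate_v2(model, tokenizer, prompt, draft_model=None, max_tokens=256):
    # New order: a parameter was inserted before max_tokens.
    return f"up to {max_tokens} tokens (draft_model={draft_model})"

# Positional call: fine with v1, but with v2 the value 100 silently
# binds to draft_model and max_tokens keeps its default.
print(generate_v2("m", "t", "hi", 100))
# -> up to 256 tokens (draft_model=100)

# Keyword call, matching the fix above: robust to signature drift.
print(generate_v2("m", "t", "hi", max_tokens=100))
# -> up to 100 tokens (draft_model=None)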