Fix `TypeError: stream_generate() takes 3 positional arguments but 4 were given` by passing model, tokenizer, and prompt as keyword arguments

This commit is contained in:
Ivan Fioravanti 2024-12-11 06:17:33 +01:00
parent 135c5818c1
commit 1fd2bfa5f5

View File

@ -76,9 +76,9 @@ def main():
messages, tokenize=False, add_generation_prompt=True
)
for response in stream_generate(
model,
tokenizer,
prompt,
model=model,
tokenizer=tokenizer,
prompt=prompt,
max_tokens=args.max_tokens,
sampler=make_sampler(args.temp, args.top_p),
prompt_cache=prompt_cache,