Fix for stream_generate() takes 3 positional arguments but 4 were given

This commit is contained in:
Ivan Fioravanti 2024-12-11 06:17:33 +01:00
parent 135c5818c1
commit 1fd2bfa5f5

View File

@@ -76,9 +76,9 @@ def main():
         messages, tokenize=False, add_generation_prompt=True
     )
     for response in stream_generate(
-        model,
-        tokenizer,
-        prompt,
+        model=model,
+        tokenizer=tokenizer,
+        prompt=prompt,
         max_tokens=args.max_tokens,
         sampler=make_sampler(args.temp, args.top_p),
         prompt_cache=prompt_cache,