Mirror of https://github.com/ml-explore/mlx-examples.git (synced 2025-07-23)

Commit 978deab589 ("small fix"), parent 35ecc17042
@@ -117,7 +117,7 @@ def generate_grpo(model, prompt, max_tokens, tokenizer, temperature):
     end_sequence = tokenizer.encode("</answer>")
     end_sequence_length = len(end_sequence)
-    output = mx.zeros((prompt.shape[1] + max_tokens,))
+    output = mx.zeros((prompt.shape[1] + max_tokens,), dtype=mx.int32)
     output[:prompt.shape[1]] = prompt[0]
     current_length = prompt.shape[1]
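The first hunk gives the pre-allocated generation buffer an explicit integer dtype. As a minimal standalone sketch of why that matters (not part of the commit; the sizes and token ids below are invented):

import mlx.core as mx

# Hypothetical sizes and token ids, for illustration only.
prompt_len, max_tokens = 3, 5

# Without an explicit dtype, mx.zeros allocates float32, so token ids copied
# into the buffer are carried as floats rather than integer indices.
output = mx.zeros((prompt_len + max_tokens,))
print(output.dtype)  # float32

# With the fix, the buffer holds int32 token ids, so slices of it can be fed
# back to the model as an index array.
output = mx.zeros((prompt_len + max_tokens,), dtype=mx.int32)
output[:prompt_len] = mx.array([101, 7592, 2088], dtype=mx.int32)
print(output.dtype)  # int32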
@@ -126,7 +126,7 @@ def generate_grpo(model, prompt, max_tokens, tokenizer, temperature):
         if temperature > 0:
             logits /= temperature
         logprobs = logits - mx.logsumexp(logits, keepdims=True)
-        return mx.random.categorical(logprobs[None, :])[0]
+        return mx.random.categorical(logprobs[None, :]).astype(mx.int32)[0]

     for _ in range(max_tokens):
         current_input = output[:current_length][None, :]
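The second hunk applies the matching cast to the sampled token itself. A rough standalone sketch with invented logits (the sample returned by mx.random.categorical is an integer, but typically not int32, so the commit casts it before it is written into the int32 buffer):

import mlx.core as mx

# Made-up logits over a tiny vocabulary.
logits = mx.array([0.1, 2.0, -1.0, 0.5])

# Normalize to log-probabilities, mirroring the sampling helper in the diff.
logprobs = logits - mx.logsumexp(logits, keepdims=True)

# The raw sample is an integer, but usually an unsigned dtype rather than
# int32, which would clash with the int32 output buffer above.
token = mx.random.categorical(logprobs[None, :])[0]
print(token.dtype)  # e.g. uint32

# Casting, as in the commit, keeps the dtype consistent with the buffer.
token = mx.random.categorical(logprobs[None, :]).astype(mx.int32)[0]
print(token.dtype)  # int32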