Set replacement=True in torch.multinomial (#858)

Author: Woosuk Kwon, 2023-08-25 12:22:01 +09:00 (committed by GitHub)
Parent: 75c0ca9d43
Commit: 94d2f59895

@@ -302,7 +302,9 @@ def _sample_from_prompt(
         # Random sampling.
         # Sample `best_of` tokens for the prompt.
         num_seqs = sampling_params.best_of
-        next_token_ids = torch.multinomial(prob, num_samples=num_seqs)
+        next_token_ids = torch.multinomial(prob,
+                                           num_samples=num_seqs,
+                                           replacement=True)
         next_token_ids = next_token_ids.tolist()
     return next_token_ids
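
The change matters because torch.multinomial samples without replacement by default, which forces the `best_of` draws to be distinct tokens and raises an error when num_samples exceeds the number of tokens with nonzero probability. A minimal sketch, not part of the commit, illustrating the difference (the toy distribution and vocabulary size are made up for illustration):

# Sketch: why replacement=True is needed when drawing `best_of` samples
# from a single next-token distribution.
import torch

torch.manual_seed(0)

# Hypothetical next-token distribution over a tiny 3-token vocabulary.
prob = torch.tensor([0.7, 0.2, 0.1])
best_of = 3

# With replacement: each of the `best_of` draws is independent, so the same
# high-probability token can be sampled more than once.
with_replacement = torch.multinomial(prob, num_samples=best_of, replacement=True)
print(with_replacement.tolist())  # e.g. [0, 0, 1]

# Without replacement: the draws must be distinct, which skews results toward
# low-probability tokens, and sampling fails outright if `best_of` exceeds the
# number of tokens with nonzero probability.
without_replacement = torch.multinomial(prob, num_samples=best_of, replacement=False)
print(without_replacement.tolist())  # always a permutation of [0, 1, 2]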