[V1] Fix yapf (#11538)

Signed-off-by: Woosuk Kwon <woosuk.kwon@berkeley.edu>
Woosuk Kwon 2024-12-27 09:47:10 +09:00 committed by GitHub
parent 371d04d39b
commit 81b979f2a8
2 changed files with 21 additions and 19 deletions

vllm/v1/sample/ops/penalties.py

@@ -2,8 +2,7 @@ from typing import List, Set, Tuple
 import torch
-from vllm.model_executor.layers.utils import (
-    apply_penalties as _apply_penalties)
+from vllm.model_executor.layers.utils import apply_penalties
 from vllm.utils import is_pin_memory_available, make_tensor_with_pad
@@ -17,27 +16,30 @@ def apply_min_token_penalties(logits: torch.Tensor,
     """
     min_tokens_logits_to_penalize: List[Tuple[int, int]] = []
     for index, min_token in enumerate(min_tokens):
-        if (len(output_token_ids[index]) < min_token):
+        if len(output_token_ids[index]) < min_token:
             for stop_token_id in stop_token_ids[index]:
                 min_tokens_logits_to_penalize.append((index, stop_token_id))
     if min_tokens_logits_to_penalize:
         logits[tuple(zip(*min_tokens_logits_to_penalize))] = -float("inf")
 
 
-def apply_penalties(logits: torch.Tensor, prompt_token_ids: torch.Tensor,
-                    presence_penalties: torch.Tensor,
-                    frequency_penalties: torch.Tensor,
-                    repetition_penalties: torch.Tensor,
-                    output_token_ids: List[List[int]]) -> torch.Tensor:
+def apply_all_penalties(
+    logits: torch.Tensor,
+    prompt_token_ids: torch.Tensor,
+    presence_penalties: torch.Tensor,
+    frequency_penalties: torch.Tensor,
+    repetition_penalties: torch.Tensor,
+    output_token_ids: List[List[int]],
+) -> torch.Tensor:
     """
    Applies presence, frequency and repetition penalties to the logits.
     """
     _, vocab_size = logits.shape
     output_tokens_t = _convert_to_tensors(output_token_ids, vocab_size,
                                           logits.device)
-    return _apply_penalties(logits, prompt_token_ids, output_tokens_t,
-                            presence_penalties, frequency_penalties,
-                            repetition_penalties)
+    return apply_penalties(logits, prompt_token_ids, output_tokens_t,
+                           presence_penalties, frequency_penalties,
+                           repetition_penalties)
 
 
 def _convert_to_tensors(output_token_ids: List[List[int]], vocab_size: int,
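
Note on the masking line kept above: logits[tuple(zip(*min_tokens_logits_to_penalize))] = -float("inf") relies on PyTorch advanced indexing, where the collected (request, token) pairs are transposed into one tuple of row indices and one tuple of column indices. The following is a minimal, self-contained sketch of that pattern for illustration only; it is not part of this commit.

from typing import List, Tuple

import torch

# Two requests over a toy vocabulary of five tokens.
logits = torch.zeros(2, 5)

# Request 0 has not yet produced its minimum number of tokens, so its two
# stop tokens (ids 3 and 4) must be made unreachable.
pairs: List[Tuple[int, int]] = [(0, 3), (0, 4)]

if pairs:
    # zip(*pairs) -> ((0, 0), (3, 4)): row indices first, then column indices.
    logits[tuple(zip(*pairs))] = -float("inf")

print(logits[0])  # tensor([0., 0., 0., -inf, -inf])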

vllm/v1/sample/sampler.py

@@ -6,8 +6,8 @@ import torch.nn as nn
 from vllm.v1.outputs import SamplerOutput
 from vllm.v1.sample.metadata import SamplingMetadata
-from vllm.v1.sample.ops.penalties import (apply_min_token_penalties,
-                                          apply_penalties)
+from vllm.v1.sample.ops.penalties import (apply_all_penalties,
+                                          apply_min_token_penalties)
 from vllm.v1.sample.ops.topk_topp_sampler import TopKTopPSampler
 
 _SAMPLING_EPS = 1e-5
@@ -127,10 +127,10 @@ class Sampler(nn.Module):
                                   sampling_metadata.min_tokens)
         if not sampling_metadata.no_penalties:
             assert sampling_metadata.prompt_token_ids is not None
-            logits = apply_penalties(logits,
-                                     sampling_metadata.prompt_token_ids,
-                                     sampling_metadata.presence_penalties,
-                                     sampling_metadata.frequency_penalties,
-                                     sampling_metadata.repetition_penalties,
-                                     sampling_metadata.output_token_ids)
+            logits = apply_all_penalties(
+                logits, sampling_metadata.prompt_token_ids,
+                sampling_metadata.presence_penalties,
+                sampling_metadata.frequency_penalties,
+                sampling_metadata.repetition_penalties,
+                sampling_metadata.output_token_ids)
         return logits
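
For readers unfamiliar with the three penalty tensors forwarded here: presence penalties subtract a flat amount from every token that has already been generated, frequency penalties subtract an amount proportional to how often a token was generated, and repetition penalties rescale the logits of previously seen tokens. The sketch below illustrates those standard formulas for a single request; it is an illustrative approximation for exposition, not the actual kernel in vllm.model_executor.layers.utils, and it only looks at generated tokens (the real implementation also covers prompt tokens).

from typing import List

import torch


def sketch_penalties(logits: torch.Tensor, output_token_ids: List[int],
                     presence: float, frequency: float,
                     repetition: float) -> torch.Tensor:
    """Illustrative single-request penalty math (not vLLM's implementation)."""
    logits = logits.clone()
    counts = torch.bincount(torch.tensor(output_token_ids),
                            minlength=logits.numel())
    seen = counts > 0

    # Repetition penalty: shrink positive logits and grow negative ones
    # for tokens that have already been generated.
    logits[seen] = torch.where(logits[seen] > 0, logits[seen] / repetition,
                               logits[seen] * repetition)

    # Frequency penalty scales with the count; presence penalty is flat.
    logits = logits - frequency * counts - presence * seen.float()
    return logits


print(sketch_penalties(torch.tensor([2.0, -1.0, 0.5]), [0, 0, 2],
                       presence=0.5, frequency=0.2, repetition=1.2))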