Mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-10 03:54:56 +08:00)
[Core] Remove unused prev_sampled_token_ids_invalid_indices input batch field (#26514)
Signed-off-by: Nick Hill <nhill@redhat.com>
Commit: 2e54db4d2b (parent: 44f633dba1)
@@ -251,7 +251,6 @@ class InputBatch:
 
         # Cached reference to the GPU tensor of previously sampled tokens
         self.prev_sampled_token_ids: Optional[torch.Tensor] = None
-        self.prev_sampled_token_ids_invalid_indices: Optional[set[int]] = None
         self.prev_req_id_to_index: Optional[dict[str, int]] = None
 
     @property
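For context, a minimal sketch (assumptions only, not the actual vLLM class) of the cross-step cache that InputBatch carries after this change: with the invalid-indices set gone, only the sampled-token tensor and the request-id-to-row mapping remain.

from typing import Optional

import torch


class PrevSampledCacheSketch:
    """Illustrative sketch only: the per-step sampling cache kept on InputBatch."""

    def __init__(self) -> None:
        # Cached reference to the GPU tensor of previously sampled tokens.
        self.prev_sampled_token_ids: Optional[torch.Tensor] = None
        # Row index of each request in prev_sampled_token_ids.
        self.prev_req_id_to_index: Optional[dict[str, int]] = None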
@@ -2305,9 +2305,6 @@ class GPUModelRunner(LoRAModelRunnerMixin, KVConnectorModelRunnerMixin):
             # These will be copied into input_ids in the next step
             # when preparing inputs.
             self.input_batch.prev_sampled_token_ids = sampled_token_ids
-            self.input_batch.prev_sampled_token_ids_invalid_indices = (
-                invalid_req_indices_set
-            )
             self.input_batch.prev_req_id_to_index = {
                 req_id: i
                 for i, req_id in enumerate(self.input_batch.req_ids)
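To show why these two fields are kept, here is a hedged sketch of the consumer side in the next step: the previously sampled tokens are copied into input_ids by looking up each request's row via prev_req_id_to_index. The function name, the token_positions argument, and the tensor shape are assumptions for illustration, not vLLM's actual code.

import torch


def copy_prev_sampled_tokens_sketch(
    input_ids: torch.Tensor,               # flat input-token buffer for the next step
    token_positions: dict[str, int],       # assumed: destination slot per request id
    prev_sampled_token_ids: torch.Tensor,  # assumed shape [num_prev_reqs, 1]
    prev_req_id_to_index: dict[str, int],  # request id -> row in prev_sampled_token_ids
) -> None:
    """Illustrative sketch: reuse last step's sampled tokens as this step's inputs."""
    for req_id, dst in token_positions.items():
        src = prev_req_id_to_index.get(req_id)
        if src is None:
            # New or resumed request: nothing was sampled for it last step.
            continue
        input_ids[dst] = prev_sampled_token_ids[src, 0]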