From 15b8fef453b373b84406207a947005a4d9d68acc Mon Sep 17 00:00:00 2001
From: TaoYu Chen
Date: Sat, 13 Sep 2025 16:11:59 +0800
Subject: [PATCH] Remove redundant assignment in xfer_buffers, This is a little fix (#24732)

Signed-off-by: ChenTaoyu-SJTU
---
 vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py b/vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py
index 17f5be76ce400..c306eeb5aa7ab 100644
--- a/vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py
+++ b/vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py
@@ -708,8 +708,6 @@ class NixlConnectorWorker:
         caches_data = []
         # With hybrid allocator, layers can share a kv cache tensor
         seen_base_addresses = []
-        xfer_buffers = (self.host_xfer_buffers
-                        if self.use_host_buffer else kv_caches)
         # Note(tms): I modified this from the original region setup code.
         # K and V are now in different regions. Advantage is that we can
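
Why the assignment is redundant: the removed ternary recomputes a value that is already bound to xfer_buffers earlier in register_kv_caches (selected from host_xfer_buffers or kv_caches by the same use_host_buffer condition), so deleting it does not change which buffers the later loop iterates. Below is a minimal standalone sketch of that pattern; the function and variable names are simplified illustrations under that assumption, not the actual vllm method body.

    # Sketch only, assuming xfer_buffers was already selected earlier in the
    # method with the same condition; names are simplified, not vllm source.
    def register_kv_caches_sketch(use_host_buffer, host_xfer_buffers, kv_caches):
        # Earlier selection of the transfer buffers (kept by this patch).
        xfer_buffers = host_xfer_buffers if use_host_buffer else kv_caches

        caches_data = []
        seen_base_addresses = []

        # The line this patch removes recomputed the exact same value:
        # xfer_buffers = (host_xfer_buffers
        #                 if use_host_buffer else kv_caches)
        # Nothing between the two assignments changes the inputs, so the
        # reassignment was a no-op and the loop below behaves identically.
        for layer_name, cache in xfer_buffers.items():
            seen_base_addresses.append(layer_name)
            caches_data.append(cache)
        return caches_data, seen_base_addresses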