From 989f4f430cd74a14d539d8b59b9d239301f1bdcd Mon Sep 17 00:00:00 2001
From: Cody Yu
Date: Mon, 3 Mar 2025 19:09:34 -0800
Subject: [PATCH] [Misc] Remove lru_cache in NvmlCudaPlatform (#14156)

Signed-off-by: Cody Yu
---
 vllm/platforms/cuda.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/vllm/platforms/cuda.py b/vllm/platforms/cuda.py
index bffa113cab899..00bbfec1ef7ca 100644
--- a/vllm/platforms/cuda.py
+++ b/vllm/platforms/cuda.py
@@ -4,7 +4,7 @@
 pynvml. However, it should not initialize cuda context.
 """
 import os
-from functools import lru_cache, wraps
+from functools import wraps
 from typing import (TYPE_CHECKING, Callable, List, Optional, Tuple, TypeVar,
                     Union)
 
@@ -284,7 +284,6 @@ class CudaPlatformBase(Platform):
 class NvmlCudaPlatform(CudaPlatformBase):
 
     @classmethod
-    @lru_cache(maxsize=8)
     @with_nvml_context
     def get_device_capability(cls,
                               device_id: int = 0
@@ -298,7 +297,6 @@ class NvmlCudaPlatform(CudaPlatformBase):
             return None
 
     @classmethod
-    @lru_cache(maxsize=8)
     @with_nvml_context
     def has_device_capability(
         cls,
@@ -311,14 +309,12 @@ class NvmlCudaPlatform(CudaPlatformBase):
             return False
 
     @classmethod
-    @lru_cache(maxsize=8)
     @with_nvml_context
     def get_device_name(cls, device_id: int = 0) -> str:
         physical_device_id = device_id_to_physical_device_id(device_id)
         return cls._get_physical_device_name(physical_device_id)
 
     @classmethod
-    @lru_cache(maxsize=8)
     @with_nvml_context
     def get_device_uuid(cls, device_id: int = 0) -> str:
         physical_device_id = device_id_to_physical_device_id(device_id)
@@ -326,7 +322,6 @@ class NvmlCudaPlatform(CudaPlatformBase):
         return pynvml.nvmlDeviceGetUUID(handle)
 
     @classmethod
-    @lru_cache(maxsize=8)
     @with_nvml_context
     def get_device_total_memory(cls, device_id: int = 0) -> int:
         physical_device_id = device_id_to_physical_device_id(device_id)
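
Note (not part of the patch): the sketch below is a minimal, standalone illustration of the decorator stack being removed here. functools.lru_cache placed under @classmethod memoizes results keyed on (cls, device_id) for the life of the process, so repeated device queries were served from the cache; with it removed, every call on NvmlCudaPlatform executes its body (and the with_nvml_context wrapper) again. The class and attribute names below are illustrative only, not from vllm/platforms/cuda.py.

from functools import lru_cache


class FakeNvmlPlatform:
    """Illustrative stand-in for NvmlCudaPlatform; not vLLM code."""

    nvml_queries = 0  # counts how often the "NVML lookup" body actually runs

    @classmethod
    @lru_cache(maxsize=8)  # the decorator pattern this patch removes
    def get_device_name(cls, device_id: int = 0) -> str:
        cls.nvml_queries += 1
        return f"GPU-{device_id}"  # stand-in for a pynvml query


FakeNvmlPlatform.get_device_name(0)
FakeNvmlPlatform.get_device_name(0)
print(FakeNvmlPlatform.nvml_queries)  # 1: the second call was served from the cache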