[Bug] Dynamo Unsupported due to `BasevLLMParameter.__torch_function__` calling disabled super() (#25613)

Signed-off-by: yewentao256 <zhyanwentao@126.com>
Signed-off-by: Wentao Ye <44945378+yewentao256@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
This commit is contained in:
Wentao Ye 2025-09-24 21:52:52 -04:00 committed by GitHub
parent 05c19485a5
commit 4492e3a554
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -12,6 +12,7 @@ from torch.nn import Parameter
from vllm.distributed import (get_tensor_model_parallel_rank,
get_tensor_model_parallel_world_size)
from vllm.logger import init_logger
from vllm.utils import is_torch_equal_or_newer
__all__ = [
"BasevLLMParameter", "PackedvLLMParameter", "PerTensorScaleParameter",
@ -114,6 +115,15 @@ class BasevLLMParameter(Parameter):
@classmethod
def __torch_function__(cls, func, types, args=(), kwargs=None):
    """Route torch operations on this parameter subclass.

    On torch >= 2.8.0, defer to the standard ``Parameter`` handling.
    On older torch, opt out by returning ``NotImplemented`` (with a
    one-time warning), since overriding ``__torch_function__`` there
    can trip TorchDynamo — see the guard below.
    """
    if is_torch_equal_or_newer("2.8.0"):
        # Normalize the kwargs default before delegating upward.
        return super().__torch_function__(func, types, args, kwargs or {})
    # Pre-2.8.0: warn once (lazy %-formatting) and decline to handle.
    logger.warning_once(
        "Torch %s detected (<2.8.0): returning NotImplemented in "
        "BasevLLMParameter.__torch_function__ to avoid potential "
        "TorchDynamo issues.",
        torch.__version__,
    )
    return NotImplemented