[doc] fix doc build error caused by msgspec (#7659)
commit e54ebc2f8f (parent 67e02fa8a4)
docs/requirements-docs.txt
@@ -3,6 +3,7 @@ sphinx-book-theme==1.0.1
 sphinx-copybutton==0.5.2
 myst-parser==2.0.0
 sphinx-argparse==0.4.0
+msgspec
 
 # packages to install to build the documentation
 pydantic
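The added msgspec line is the actual fix for the build error in the commit title: the Sphinx build imports vllm modules for autodoc, so a runtime dependency missing from the docs environment presumably aborts the build with an ImportError. A minimal sketch of that failure mode (illustrative only, not part of the commit):

# Illustrative sketch: Sphinx autodoc imports the documented modules, so a
# dependency absent from the docs environment fails the build up front.
import importlib

for dep in ("msgspec", "pydantic"):
    try:
        importlib.import_module(dep)
        print(f"{dep}: importable, autodoc can proceed")
    except ImportError as exc:
        print(f"{dep}: missing, the doc build would abort: {exc}")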
vllm/platforms/__init__.py
@@ -1,23 +1,54 @@
-import torch
-
 from .interface import Platform, PlatformEnum, UnspecifiedPlatform
 
 current_platform: Platform
 
-try:
-    import libtpu
-except ImportError:
-    libtpu = None
+# NOTE: we don't use `torch.version.cuda` / `torch.version.hip` because
+# they only indicate the build configuration, not the runtime environment.
+# For example, people can install a cuda build of pytorch but run on tpu.
 
-if libtpu is not None:
+is_tpu = False
+try:
+    import torch_xla.core.xla_model as xm
+    xm.xla_device(devkind="TPU")
+    is_tpu = True
+except Exception:
+    pass
+
+is_cuda = False
+
+try:
+    import pynvml
+    pynvml.nvmlInit()
+    try:
+        if pynvml.nvmlDeviceGetCount() > 0:
+            is_cuda = True
+    finally:
+        pynvml.nvmlShutdown()
+except Exception:
+    pass
+
+is_rocm = False
+
+try:
+    import amdsmi
+    amdsmi.amdsmi_init()
+    try:
+        if len(amdsmi.amdsmi_get_processor_handles()) > 0:
+            is_rocm = True
+    finally:
+        amdsmi.amdsmi_shut_down()
+except Exception:
+    pass
+
+if is_tpu:
     # people might install pytorch built with cuda but run on tpu
     # so we need to check tpu first
     from .tpu import TpuPlatform
     current_platform = TpuPlatform()
-elif torch.version.cuda is not None:
+elif is_cuda:
     from .cuda import CudaPlatform
     current_platform = CudaPlatform()
-elif torch.version.hip is not None:
+elif is_rocm:
     from .rocm import RocmPlatform
     current_platform = RocmPlatform()
 else:
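The substantive change in this file: platform selection now probes the runtime environment instead of reading `torch.version.cuda` / `torch.version.hip`, which only reflect how the installed PyTorch wheel was built, not what hardware is actually present. Below is the NVIDIA branch of that idiom as a standalone sketch; the function name has_nvidia_gpu is ours, not from the commit, but the pynvml calls match the ones in the diff:

# Standalone sketch of the runtime-detection idiom from the diff above:
# ask the NVML management library for devices instead of trusting
# PyTorch build metadata.
def has_nvidia_gpu() -> bool:
    try:
        import pynvml  # NVIDIA management library bindings
        pynvml.nvmlInit()
        try:
            # At least one visible device means a usable CUDA runtime.
            return pynvml.nvmlDeviceGetCount() > 0
        finally:
            pynvml.nvmlShutdown()
    except Exception:
        # Missing library, no driver, or no device: treat as "no GPU".
        return False

if __name__ == "__main__":
    print("NVIDIA GPU present:", has_nvidia_gpu())

The torch_xla and amdsmi branches in the diff follow the same shape: try the vendor library, confirm at least one device, and fall through quietly on any failure.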