[XPU] upgrade to torch 2.8 for XPU (#22300)

Signed-off-by: Kunshang Ji <kunshang.ji@intel.com>
Author: Kunshang Ji
Date: 2025-08-09 08:03:45 +08:00 (committed by GitHub)
parent 311d875614
commit 81c57f60a2
4 changed files with 15 additions and 24 deletions


@@ -1,9 +1,12 @@
 # oneapi 2025.0.2 docker base image use rolling 2448 package. https://dgpu-docs.intel.com/releases/packages.html?release=Rolling+2448.13&os=Ubuntu+22.04, and we don't need install driver manually.
-FROM intel/deep-learning-essentials:2025.0.2-0-devel-ubuntu22.04 AS vllm-base
+FROM intel/deep-learning-essentials:2025.1.3-0-devel-ubuntu24.04 AS vllm-base
-RUN rm /etc/apt/sources.list.d/intel-graphics.list
-RUN apt-get update -y && \
+RUN apt clean && apt-get update -y && \
+    apt-get install -y software-properties-common && \
+    add-apt-repository ppa:deadsnakes/ppa && \
+    apt-get install -y python3.10 python3.10-distutils && \
+    curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 && \
     apt-get install -y --no-install-recommends --fix-missing \
         curl \
         ffmpeg \
@@ -14,11 +17,13 @@ RUN apt-get update -y && \
         libgl1 \
         lsb-release \
         numactl \
-        python3 \
-        python3-dev \
-        python3-pip \
+        python3.10-dev \
         wget
+RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 1
+RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1
 WORKDIR /workspace/vllm
 COPY requirements/xpu.txt /workspace/vllm/requirements/xpu.txt
 COPY requirements/common.txt /workspace/vllm/requirements/common.txt
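
The new base image is Ubuntu 24.04, so the Dockerfile now pins Python 3.10 from the deadsnakes PPA and promotes it to the default python3/python via update-alternatives. A minimal sanity-check sketch that could be run inside the built container (not part of the image; the version strings below are expectations, not guarantees):

# Hypothetical in-container check: confirm the update-alternatives default
# interpreter and the XPU torch build installed from requirements/xpu.txt.
import sys
import torch

assert sys.version_info[:2] == (3, 10), f"expected Python 3.10, got {sys.version}"
print(torch.__version__)         # expected to report 2.8.0+xpu
print(torch.xpu.is_available())  # True only when an Intel GPU and driver are visible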


@@ -10,15 +10,10 @@ wheel
 jinja2>=3.1.6
 datasets # for benchmark scripts
 numba == 0.60.0 # v0.61 doesn't support Python 3.9. Required for N-gram speculative decoding
-torch==2.7.0+xpu
+--extra-index-url=https://download.pytorch.org/whl/xpu
+torch==2.8.0+xpu
 torchaudio
 torchvision
 pytorch-triton-xpu
---extra-index-url=https://download.pytorch.org/whl/xpu
-# Please refer xpu doc, we need manually install intel-extension-for-pytorch 2.6.10+xpu due to there are some conflict dependencies with torch 2.6.0+xpu
-# FIXME: This will be fix in ipex 2.7. just leave this here for awareness.
-intel-extension-for-pytorch==2.7.10+xpu
-oneccl_bind_pt==2.7.0+xpu
---extra-index-url=https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
+intel-extension-for-pytorch==2.8.10+xpu
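
requirements/xpu.txt now resolves torch 2.8.0+xpu (plus torchaudio, torchvision, pytorch-triton-xpu) from the PyTorch XPU index, bumps intel-extension-for-pytorch to 2.8.10+xpu, and drops oneccl_bind_pt, the separate Intel extra index, and the 2.7-era workaround comments. A small post-install sketch (assumes the pins above resolved; importing IPEX requires the Intel GPU runtime to be present):

# Hypothetical version check after: python3.10 -m pip install -r requirements/xpu.txt
import torch
import intel_extension_for_pytorch as ipex  # assumes XPU runtime libraries are available

print(torch.__version__)  # expected 2.8.0+xpu
print(ipex.__version__)   # expected 2.8.10+xpu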


@@ -4,8 +4,6 @@
 import logging
 from typing import Any, Callable
-import torch
 import vllm.envs as envs
 logger = logging.getLogger(__name__)
@@ -68,13 +66,6 @@ def load_general_plugins():
         return
     plugins_loaded = True
-    # some platform-specific configurations
-    from vllm.platforms import current_platform
-    if current_platform.is_xpu():
-        # see https://github.com/pytorch/pytorch/blob/43c5f59/torch/_dynamo/config.py#L158
-        torch._dynamo.config.disable = True
     plugins = load_plugins_by_group(group=DEFAULT_PLUGINS_GROUP)
     # general plugins, we only need to execute the loaded functions
     for func in plugins.values():
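
The deleted block was a torch 2.7-era workaround that globally disabled TorchDynamo whenever the XPU platform was detected; with torch 2.8 it is no longer applied, so torch.compile stays usable after plugin loading. A minimal sketch of the resulting behavior (the lambda example is illustrative, not from the PR):

# Sketch: load_general_plugins() no longer flips torch._dynamo.config.disable on XPU.
import torch
from torch._dynamo import config as dynamo_config
from vllm.plugins import load_general_plugins

load_general_plugins()
assert dynamo_config.disable is False  # previously forced to True on XPU hosts

doubled = torch.compile(lambda x: x * 2)
print(doubled(torch.ones(4)))  # tensor([2., 2., 2., 2.])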


@@ -152,7 +152,7 @@ class XPUWorker(Worker):
             raise RuntimeError(
                 f"Not support device type: {self.device_config.device}")
-        ENV_CCL_ZE_IPC_EXCHANGE = os.getenv("CCL_ZE_IPC_EXCHANGE", "drmfd")
+        ENV_CCL_ZE_IPC_EXCHANGE = os.getenv("CCL_ZE_IPC_EXCHANGE", "pidfd")
         ENV_CCL_ATL_TRANSPORT = os.getenv("CCL_ATL_TRANSPORT", "ofi")
         ENV_LOCAL_WORLD_SIZE = os.getenv("LOCAL_WORLD_SIZE",
                                          str(self.parallel_config.world_size))
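
The XPU worker's default Level Zero IPC exchange backend changes from drmfd to pidfd; because the value is read with os.getenv, an explicitly exported CCL_ZE_IPC_EXCHANGE still takes precedence. A small sketch of that fallback pattern (variable names mirror the diff; the print is illustrative only):

# Sketch of the os.getenv fallback used above: exporting CCL_ZE_IPC_EXCHANGE=drmfd
# before launching vLLM restores the previous backend; otherwise "pidfd" is used.
import os

ipc_exchange = os.getenv("CCL_ZE_IPC_EXCHANGE", "pidfd")
atl_transport = os.getenv("CCL_ATL_TRANSPORT", "ofi")
print(f"CCL_ZE_IPC_EXCHANGE={ipc_exchange}, CCL_ATL_TRANSPORT={atl_transport}")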