mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-17 23:46:21 +08:00)
[Bugfix] Fix 'ModuleNotFoundError: No module named 'intel_extension_for_pytorch'' for --tensor-parallel-size more than 1 (#12546)
parent c53dc466b1
commit 022bcc701a
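The diff below wraps the previously unconditional IPEX import in a try/except and falls back to PyTorch's native all_reduce when the package is missing. As a rough, standalone sketch of the same pattern (the helper name cpu_all_reduce and the bare group argument are illustrative only, not vLLM API; the actual patch keeps this logic inline in GroupCoordinator.all_reduce):

```python
import torch
import torch.distributed as dist


def cpu_all_reduce(tensor: torch.Tensor, group=None) -> torch.Tensor:
    """In-place all-reduce of a CPU tensor, preferring IPEX when available."""
    try:
        # Import lazily so the dependency is only needed when it is installed.
        import intel_extension_for_pytorch as ipex
        ipex.distributed.all_reduce(tensor, group=group)
    except ImportError:
        # IPEX is optional: fall back to PyTorch's native all_reduce so a CPU
        # run with --tensor-parallel-size > 1 no longer crashes with
        # ModuleNotFoundError.
        dist.all_reduce(tensor, group=group)
    return tensor
```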
@@ -329,9 +329,17 @@ class GroupCoordinator:
             return input_
 
         if input_.is_cpu:
-            import intel_extension_for_pytorch as ipex
-            ipex.distributed.all_reduce(input_, group=self.device_group)
-            return input_
+            try:
+                import intel_extension_for_pytorch as ipex
+                ipex.distributed.all_reduce(input_, group=self.device_group)
+                return input_
+            except ImportError:
+                """
+                Intel IPEX not found. Falling back to PyTorch native
+                all_reduce for CPU
+                """
+                torch.distributed.all_reduce(input_, group=self.device_group)
+                return input_
 
         if self.tpu_communicator is not None and \
                 not self.tpu_communicator.disabled:
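A quick way to exercise the fallback branch without IPEX installed is a single-rank gloo group; this check is illustrative and not part of the patch, and the address/port are placeholders.

```python
import torch
import torch.distributed as dist

# Single-rank gloo group: enough to run the native CPU all_reduce path.
dist.init_process_group(backend="gloo",
                        init_method="tcp://127.0.0.1:29500",
                        rank=0, world_size=1)

t = torch.ones(4)   # CPU tensor, so the is_cpu branch would be taken
dist.all_reduce(t)  # native fallback; with one rank the values are unchanged
print(t)            # tensor([1., 1., 1., 1.])
dist.destroy_process_group()
```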