Mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-24 17:55:50 +08:00)
[Bugfix] Fix incorrect resolving order for transformers fallback (#15279)
Signed-off-by: Isotr0py <2037008807@qq.com>
parent 47c7126213
commit 84e00adc8a
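The root cause, as inferred from the change below: the registry appears to resolve a model by trying the listed architectures in order, and the old normalization rewrote every unrecognized architecture to "TransformersModel" in place. If an unrecognized name appeared before a natively supported one, the generic Transformers fallback won the resolution. A minimal sketch of the pre-fix behavior, using a hypothetical SUPPORTED set in place of the registry's self.models:

    # Pre-fix normalization sketch; SUPPORTED stands in for self.models.
    SUPPORTED = {"LlamaForCausalLM"}

    def normalize_old(architectures):
        normalized_arch = []
        for model in architectures:
            if model not in SUPPORTED:
                # unrecognized names were rewritten in place...
                model = "TransformersModel"
            normalized_arch.append(model)
        return normalized_arch

    # ...so the fallback can land ahead of a supported architecture:
    print(normalize_old(["MyCustomArch", "LlamaForCausalLM"]))
    # -> ['TransformersModel', 'LlamaForCausalLM']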
@@ -418,11 +418,13 @@ class _ModelRegistry:
         if not architectures:
             logger.warning("No model architectures are specified")
 
-        normalized_arch = []
-        for model in architectures:
-            if model not in self.models:
-                model = "TransformersModel"
-            normalized_arch.append(model)
+        # keep only the architectures the registry supports
+        normalized_arch = list(
+            filter(lambda model: model in self.models, architectures))
+
+        # make sure the Transformers fallback is put last
+        if len(normalized_arch) != len(architectures):
+            normalized_arch.append("TransformersModel")
         return normalized_arch
 
     def inspect_model_cls(
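For comparison, a sketch of the post-fix behavior under the same hypothetical SUPPORTED set: recognized architectures keep their original order, and the fallback is appended once at the end only when something was filtered out, so a native implementation is always preferred over the generic Transformers one.

    # Post-fix normalization sketch; SUPPORTED stands in for self.models.
    SUPPORTED = {"LlamaForCausalLM"}

    def normalize_new(architectures):
        # keep only the architectures the registry knows about
        normalized_arch = [m for m in architectures if m in SUPPORTED]
        # append the Transformers fallback last if anything was dropped
        if len(normalized_arch) != len(architectures):
            normalized_arch.append("TransformersModel")
        return normalized_arch

    print(normalize_new(["MyCustomArch", "LlamaForCausalLM"]))
    # -> ['LlamaForCausalLM', 'TransformersModel']

Note the sketch uses a list comprehension rather than the filter() call in the actual diff; the behavior is the same.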