mirror of https://git.datalinker.icu/vllm-project/vllm.git (synced 2025-12-10 15:25:28 +08:00)

[Bugfix] Deprecate registration of custom configs to huggingface (#9083)
This commit is contained in:
parent 15986f598c
commit cfadb9c687

@@ -97,7 +97,8 @@ def run_test(
         self.tokenizer = hf_runner.tokenizer
         self.dtype = hf_runner.model.dtype
 
-        self.config = AutoConfig.from_pretrained(hf_runner.model_name)
+        self.config = AutoConfig.from_pretrained(hf_runner.model_name,
+                                                 trust_remote_code=True)
         self.vision_config = self.config.vision_config
         self.use_thumbnail = self.config.use_thumbnail
         self.min_num = self.config.min_dynamic_patch
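The hunk above adjusts the test so it no longer depends on vLLM having registered its custom config classes with transformers: the checkpoint's own config code is loaded via trust_remote_code=True instead. A minimal sketch of that loading pattern, using a hypothetical repo id rather than anything from this diff:

    from transformers import AutoConfig

    # Hypothetical repo id, purely for illustration; any checkpoint that ships
    # its own configuration_*.py behaves the same way.
    MODEL_ID = "org/remote-code-model"

    # Without a prior AutoConfig.register(...) call, transformers must be
    # allowed to execute the configuration code bundled with the checkpoint,
    # hence trust_remote_code=True.
    config = AutoConfig.from_pretrained(MODEL_ID, trust_remote_code=True)

    # Model-specific attributes (e.g. a nested vision_config) are then
    # available on the returned object, which is what run_test reads above.
    print(type(config).__name__)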
@@ -195,11 +195,6 @@ def _run_test(
     def process(hf_inputs: BatchEncoding):
         return hf_inputs
 
-    from transformers.models.mllama import MllamaConfig as MllamaConfigHf
-
-    # use transformer's MllamaConfig for hf_runner
-    # and vllm's MllamaConfig for vllm_runner
-    AutoConfig.register("mllama", MllamaConfigHf, exist_ok=True)
     with hf_runner(model,
                    dtype=dtype,
                    model_kwargs={"device_map": "auto"},
@@ -213,8 +208,6 @@ def _run_test(
         for prompts, images in inputs
     ]
 
-    from vllm.transformers_utils.configs.mllama import MllamaConfig
-    AutoConfig.register("mllama", MllamaConfig, exist_ok=True)
     for hf_outputs, vllm_outputs in zip(hf_outputs_per_image,
                                         vllm_outputs_per_image):
         check_logprobs_close(
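For context on the two mllama hunks above: the deleted lines used transformers' AutoConfig.register API to swap which MllamaConfig class resolves for the "mllama" model type, first the transformers class for hf_runner and then vLLM's class for the comparison. A rough sketch of that registration mechanism with an illustrative class, not vLLM code:

    from transformers import AutoConfig, PretrainedConfig

    class ExampleConfig(PretrainedConfig):
        """Illustrative config class; not part of vLLM or transformers."""
        model_type = "example-model"

    # Map the "example-model" model_type to ExampleConfig so that AutoConfig
    # can resolve it, e.g. through AutoConfig.for_model("example-model").
    AutoConfig.register("example-model", ExampleConfig)
    print(AutoConfig.for_model("example-model"))

    # Re-registering a model_type that transformers already ships raises a
    # ValueError unless exist_ok=True is passed, which is presumably why the
    # removed test code passed that flag when overriding "mllama".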
@@ -1,4 +1,3 @@
-import contextlib
 import enum
 import json
 from pathlib import Path
@@ -61,13 +60,6 @@ _CONFIG_REGISTRY: Dict[str, Type[PretrainedConfig]] = {
     **_CONFIG_REGISTRY_OVERRIDE_HF
 }
 
-for name, cls in _CONFIG_REGISTRY.items():
-    with contextlib.suppress(ValueError):
-        if name in _CONFIG_REGISTRY_OVERRIDE_HF:
-            AutoConfig.register(name, cls, exist_ok=True)
-        else:
-            AutoConfig.register(name, cls)
-
 
 class ConfigFormat(str, enum.Enum):
     AUTO = "auto"
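The block removed here is the one the commit title refers to: at import time vLLM iterated over its internal _CONFIG_REGISTRY and registered every custom config class with transformers' AutoConfig (hence the now-unused contextlib import dropped above). After this change the mapping stays internal to vLLM. A simplified sketch of what an internal-registry lookup can look like, with placeholder names rather than the exact vLLM implementation:

    from typing import Dict, Type

    from transformers import AutoConfig, PretrainedConfig

    class MyModelConfig(PretrainedConfig):
        """Stand-in for a vLLM-maintained config class (illustrative only)."""
        model_type = "my-model"

    # Analogous to _CONFIG_REGISTRY in the diff above: model_type -> class.
    _CONFIG_REGISTRY: Dict[str, Type[PretrainedConfig]] = {
        "my-model": MyModelConfig,
    }

    def get_config(model: str, trust_remote_code: bool = False) -> PretrainedConfig:
        # Read the raw config.json to learn the model_type without committing
        # to a config class yet.
        config_dict, _ = PretrainedConfig.get_config_dict(model)
        model_type = config_dict.get("model_type", "")
        # Prefer the internal registry; fall back to transformers otherwise.
        if model_type in _CONFIG_REGISTRY:
            return _CONFIG_REGISTRY[model_type].from_pretrained(model)
        return AutoConfig.from_pretrained(model, trust_remote_code=trust_remote_code)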