Lazy loading to avoid importing all files (#29716)

Signed-off-by: Luke <yq0536@gmail.com>
This commit is contained in:
Luke 2025-12-06 23:13:14 -08:00 committed by GitHub
parent 17eb25e327
commit a49d813fa8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -10,46 +10,47 @@ Model configs may be defined in this directory for the following reasons:
deepseek-ai/DeepSeek-V3.2-Exp. deepseek-ai/DeepSeek-V3.2-Exp.
""" """
from transformers import DeepseekV3Config from __future__ import annotations
from vllm.transformers_utils.configs.afmoe import AfmoeConfig import importlib
from vllm.transformers_utils.configs.chatglm import ChatGLMConfig
from vllm.transformers_utils.configs.deepseek_vl2 import DeepseekVLV2Config
from vllm.transformers_utils.configs.dotsocr import DotsOCRConfig
from vllm.transformers_utils.configs.eagle import EAGLEConfig
# RWConfig is for the original tiiuae/falcon-40b(-instruct) and _CLASS_TO_MODULE: dict[str, str] = {
# tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the "AfmoeConfig": "vllm.transformers_utils.configs.afmoe",
# `FalconConfig` class from the official HuggingFace transformers library. "ChatGLMConfig": "vllm.transformers_utils.configs.chatglm",
from vllm.transformers_utils.configs.falcon import RWConfig "DeepseekVLV2Config": "vllm.transformers_utils.configs.deepseek_vl2",
from vllm.transformers_utils.configs.flex_olmo import FlexOlmoConfig "DotsOCRConfig": "vllm.transformers_utils.configs.dotsocr",
from vllm.transformers_utils.configs.hunyuan_vl import ( "EAGLEConfig": "vllm.transformers_utils.configs.eagle",
HunYuanVLConfig, "FlexOlmoConfig": "vllm.transformers_utils.configs.flex_olmo",
HunYuanVLTextConfig, "HunYuanVLConfig": "vllm.transformers_utils.configs.hunyuan_vl",
HunYuanVLVisionConfig, "HunYuanVLTextConfig": "vllm.transformers_utils.configs.hunyuan_vl",
) "HunYuanVLVisionConfig": "vllm.transformers_utils.configs.hunyuan_vl",
from vllm.transformers_utils.configs.jais import JAISConfig # RWConfig is for the original tiiuae/falcon-40b(-instruct) and
from vllm.transformers_utils.configs.kimi_linear import KimiLinearConfig # tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the
from vllm.transformers_utils.configs.kimi_vl import KimiVLConfig # `FalconConfig` class from the official HuggingFace transformers library.
from vllm.transformers_utils.configs.lfm2_moe import Lfm2MoeConfig "RWConfig": "vllm.transformers_utils.configs.falcon",
from vllm.transformers_utils.configs.medusa import MedusaConfig "JAISConfig": "vllm.transformers_utils.configs.jais",
from vllm.transformers_utils.configs.midashenglm import MiDashengLMConfig "Lfm2MoeConfig": "vllm.transformers_utils.configs.lfm2_moe",
from vllm.transformers_utils.configs.mlp_speculator import MLPSpeculatorConfig "MedusaConfig": "vllm.transformers_utils.configs.medusa",
from vllm.transformers_utils.configs.moonvit import MoonViTConfig "MiDashengLMConfig": "vllm.transformers_utils.configs.midashenglm",
from vllm.transformers_utils.configs.nemotron import NemotronConfig "MLPSpeculatorConfig": "vllm.transformers_utils.configs.mlp_speculator",
from vllm.transformers_utils.configs.nemotron_h import NemotronHConfig "MoonViTConfig": "vllm.transformers_utils.configs.moonvit",
from vllm.transformers_utils.configs.olmo3 import Olmo3Config "KimiLinearConfig": "vllm.transformers_utils.configs.kimi_linear",
from vllm.transformers_utils.configs.ovis import OvisConfig "KimiVLConfig": "vllm.transformers_utils.configs.kimi_vl",
from vllm.transformers_utils.configs.qwen3_next import Qwen3NextConfig "NemotronConfig": "vllm.transformers_utils.configs.nemotron",
from vllm.transformers_utils.configs.radio import RadioConfig "NemotronHConfig": "vllm.transformers_utils.configs.nemotron_h",
from vllm.transformers_utils.configs.speculators.base import SpeculatorsConfig "Olmo3Config": "vllm.transformers_utils.configs.olmo3",
from vllm.transformers_utils.configs.step3_vl import ( "OvisConfig": "vllm.transformers_utils.configs.ovis",
Step3TextConfig, "RadioConfig": "vllm.transformers_utils.configs.radio",
Step3VisionEncoderConfig, "SpeculatorsConfig": "vllm.transformers_utils.configs.speculators.base",
Step3VLConfig, "UltravoxConfig": "vllm.transformers_utils.configs.ultravox",
) "Step3VLConfig": "vllm.transformers_utils.configs.step3_vl",
from vllm.transformers_utils.configs.tarsier2 import Tarsier2Config "Step3VisionEncoderConfig": "vllm.transformers_utils.configs.step3_vl",
from vllm.transformers_utils.configs.ultravox import UltravoxConfig "Step3TextConfig": "vllm.transformers_utils.configs.step3_vl",
"Qwen3NextConfig": "vllm.transformers_utils.configs.qwen3_next",
"Tarsier2Config": "vllm.transformers_utils.configs.tarsier2",
# Special case: DeepseekV3Config is from HuggingFace Transformers
"DeepseekV3Config": "transformers",
}
__all__ = [ __all__ = [
"AfmoeConfig", "AfmoeConfig",
@ -84,3 +85,16 @@ __all__ = [
"Qwen3NextConfig", "Qwen3NextConfig",
"Tarsier2Config", "Tarsier2Config",
] ]
def __getattr__(name: str):
    """Lazily import and return the config class ``name`` (PEP 562 hook).

    Invoked only when ``name`` is not already an attribute of this module.
    The resolved class is cached in ``globals()`` so that subsequent
    accesses hit the module dict directly and never re-enter this hook
    (avoiding a repeated ``import_module`` + ``getattr`` round trip).

    Raises:
        AttributeError: if ``name`` is not a known lazily-loaded config.
    """
    if name in _CLASS_TO_MODULE:
        module = importlib.import_module(_CLASS_TO_MODULE[name])
        cls = getattr(module, name)
        # Cache: future lookups bypass __getattr__ entirely.
        globals()[name] = cls
        return cls
    raise AttributeError(f"module 'configs' has no attribute '{name}'")
def __dir__():
    """Return the module's public attribute names, sorted (PEP 562 hook)."""
    # sorted() accepts any iterable and already returns a new list, so the
    # intermediate list() copy in the original was redundant (ruff C414).
    return sorted(__all__)