[CI] Update several models in registry that are available online now (#30514)

Signed-off-by: mgoin <mgoin64@gmail.com>
Signed-off-by: Michael Goin <mgoin64@gmail.com>
Co-authored-by: Isotr0py <2037008807@qq.com>
This commit is contained in:
Michael Goin 2025-12-12 21:28:13 -05:00 committed by GitHub
parent f5dfbbd8e9
commit 2f32a68d75
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 6 additions and 6 deletions

View File

@@ -692,6 +692,7 @@ steps:
   source_file_dependencies:
   - vllm/
   - tests/models/test_initialization.py
+  - tests/models/registry.py
   commands:
     # Run a subset of model initialization tests
     - pytest -v -s models/test_initialization.py::test_can_initialize_small_subset
@@ -704,6 +705,7 @@ steps:
   - vllm/model_executor/models/
   - vllm/transformers_utils/
   - tests/models/test_initialization.py
+  - tests/models/registry.py
   commands:
     # Only when vLLM model source is modified - test initialization of a large
     # subset of supported models (the complement of the small subset in the above

View File

@@ -356,7 +356,7 @@ _TEXT_GENERATION_EXAMPLE_MODELS = {
     ),
     "MistralForCausalLM": _HfExamplesInfo("mistralai/Mistral-7B-Instruct-v0.1"),
     "MistralLarge3ForCausalLM": _HfExamplesInfo(
-        "mistralai/Mistral-Large-3-675B-Instruct-2512-NVFP4", is_available_online=False
+        "mistralai/Mistral-Large-3-675B-Instruct-2512-NVFP4"
     ),
     "MixtralForCausalLM": _HfExamplesInfo(
         "mistralai/Mixtral-8x7B-Instruct-v0.1",
@@ -635,7 +635,7 @@ _MULTIMODAL_EXAMPLE_MODELS = {
     ),
     "HunYuanVLForConditionalGeneration": _HfExamplesInfo(
         "tencent/HunyuanOCR",
-        is_available_online=False,
+        hf_overrides={"num_experts": 0},
     ),
     "Idefics3ForConditionalGeneration": _HfExamplesInfo(
         "HuggingFaceM4/Idefics3-8B-Llama3",
@@ -674,8 +674,7 @@ _MULTIMODAL_EXAMPLE_MODELS = {
         "https://huggingface.co/moonshotai/Kimi-VL-A3B-Instruct/discussions/31",
     ),
     "LightOnOCRForConditionalGeneration": _HfExamplesInfo(
-        "lightonai/LightOnOCR-1B",
-        is_available_online=False,
+        "lightonai/LightOnOCR-1B-1025"
     ),
     "Llama4ForConditionalGeneration": _HfExamplesInfo(
         "meta-llama/Llama-4-Scout-17B-16E-Instruct",
@@ -779,8 +778,6 @@ _MULTIMODAL_EXAMPLE_MODELS = {
             "ministral-3": "mistralai/Ministral-3-3B-Instruct-2512",
         },
         tokenizer_mode="mistral",
-        # TODO: revert once Mistral-Large-3 and Ministral-3 are publicly available.
-        is_available_online=False,
     ),
     "QwenVLForConditionalGeneration": _HfExamplesInfo(
         "Qwen/Qwen-VL",
@@ -886,6 +883,7 @@ _SPECULATIVE_DECODING_EXAMPLE_MODELS = {
     "EagleMistralLarge3ForCausalLM": _HfExamplesInfo(
         "mistralai/Mistral-Large-3-675B-Instruct-2512",
         speculative_model="mistralai/Mistral-Large-3-675B-Instruct-2512-Eagle",
+        # TODO: revert once figuring out OOM in CI
         is_available_online=False,
     ),
     "LlamaForCausalLMEagle3": _HfExamplesInfo(