From 906e05d8405f682797b5c934ec84d3c272a5fafd Mon Sep 17 00:00:00 2001
From: Jee Jee Li
Date: Sat, 5 Jul 2025 13:48:16 +0800
Subject: [PATCH] [Misc] Remove the unused LoRA test code (#20494)

Signed-off-by: Jee Jee Li
---
 tests/lora/conftest.py   | 17 -----------------
 vllm/multimodal/utils.py |  2 +-
 2 files changed, 1 insertion(+), 18 deletions(-)

diff --git a/tests/lora/conftest.py b/tests/lora/conftest.py
index 4908f9a060f7f..881d5efa69193 100644
--- a/tests/lora/conftest.py
+++ b/tests/lora/conftest.py
@@ -249,23 +249,6 @@ def llama_2_7b_model_extra_embeddings(llama_2_7b_engine_extra_embeddings):
            model_runner.model)
 
 
-@pytest.fixture(params=[True, False])
-def run_with_both_engines_lora(request, monkeypatch):
-    # Automatically runs tests twice, once with V1 and once without
-    use_v1 = request.param
-    # Tests decorated with `@skip_v1` are only run without v1
-    skip_v1 = request.node.get_closest_marker("skip_v1")
-
-    if use_v1:
-        if skip_v1:
-            pytest.skip("Skipping test on vllm V1")
-        monkeypatch.setenv('VLLM_USE_V1', '1')
-    else:
-        monkeypatch.setenv('VLLM_USE_V1', '0')
-
-    yield
-
-
 @pytest.fixture
 def reset_default_device():
     """
diff --git a/vllm/multimodal/utils.py b/vllm/multimodal/utils.py
index 6a4a998c7e594..8dfbc6503520e 100644
--- a/vllm/multimodal/utils.py
+++ b/vllm/multimodal/utils.py
@@ -489,4 +489,4 @@ def fetch_video(
         "video": video_io_kwargs
     }
     media_connector = MediaConnector(media_io_kwargs=media_io_kwargs)
-    return media_connector.fetch_video(video_url)
+    return media_connector.fetch_video(video_url)
\ No newline at end of file