[Misc] Remove isort and yapf ignores (#26888)

Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
Author: Cyrus Leung
Date: 2025-10-15 20:09:03 +08:00 (committed by GitHub)
Parent: f54f85129e
Commit: f93e348010
4 changed files with 2 additions and 10 deletions
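For context, the comments deleted below are formatter directives rather than code: isort reads "# isort: skip" (single line) and "# isort: off" / "# isort: on" (region), while yapf reads "# yapf: disable" / "# yapf: enable". Once those tools are dropped from the toolchain the comments do nothing, so they can be removed. A small self-contained sketch of the directives in question, using stdlib imports only (my own illustration, not vLLM code):

```python
# The next two imports are deliberately not alphabetized; the fence below keeps
# isort from reordering them.
# isort: off
from zlib import crc32
from base64 import b64encode
# isort: on

# The fence below keeps yapf from reflowing the hand-formatted literal.
# yapf: disable
IDENTITY = [
    [1, 0],
    [0, 1],
]
# yapf: enable

print(crc32(b"demo"), b64encode(b"demo"), IDENTITY)
```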

View File

@@ -6,6 +6,7 @@ import torch
from transformers import AutoTokenizer
from tests.v1.engine.utils import (
    FULL_STRINGS,
    NUM_PROMPT_LOGPROBS_UNDER_TEST,
    NUM_SAMPLE_LOGPROBS_UNDER_TEST,
    PROMPT_LEN,
@@ -18,8 +19,6 @@ from vllm.engine.arg_utils import EngineArgs
from ...distributed.conftest import publisher_config, random_port # noqa: F401
from tests.v1.engine.utils import FULL_STRINGS # isort: skip
EngineCoreSampleLogprobsType = list[tuple[torch.Tensor, torch.Tensor]]
EngineCorePromptLogprobsType = tuple[torch.Tensor, torch.Tensor]

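Taken together, the two hunks above fold FULL_STRINGS into the grouped import at the top of the test file, so the separate import that had to be pinned with an isort: skip comment can be deleted. A generic stand-alone sketch of that consolidation, using stdlib names rather than the vLLM test helpers:

```python
# Before: one name was imported separately and pinned out of sorted order, e.g.
#   from os.path import join  # isort: skip
# After: the name is folded into the existing grouped import, so the directive
# is no longer needed and the block stays alphabetized on its own.
from os.path import (
    basename,
    join,
)

print(join("a", basename("/tmp/b")))
```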
View File

@@ -8,10 +8,7 @@ import torch_xla
from vllm.platforms import current_platform
from vllm.v1.sample.ops.topk_topp_sampler import apply_top_k_top_p
# isort: off
from vllm.v1.sample.tpu.sampler import apply_top_k_top_p as apply_top_k_top_p_tpu
# isort: on
if not current_platform.is_tpu():
pytest.skip("This test needs a TPU.", allow_module_level=True)

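Unrelated to the formatter cleanup, the context lines above show pytest's module-level skip guard, which skips the whole file at collection time when no TPU is available. A minimal sketch of that pattern, with HAVE_TPU as a hypothetical stand-in for current_platform.is_tpu(), meant to sit at the top of a file that pytest collects:

```python
import pytest

HAVE_TPU = False  # hypothetical stand-in for current_platform.is_tpu()

# Called at module scope, pytest.skip(..., allow_module_level=True) skips the
# entire test module during collection instead of failing on TPU-only imports.
if not HAVE_TPU:
    pytest.skip("This test needs a TPU.", allow_module_level=True)


def test_placeholder():
    # Unreachable when the module-level skip above fires.
    assert True
```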
View File

@@ -15,6 +15,7 @@ from .compressed_tensors_w8a8_int8 import CompressedTensorsW8A8Int8
from .compressed_tensors_w8a16_fp8 import CompressedTensorsW8A16Fp8
from .compressed_tensors_wNa16 import WNA16_SUPPORTED_BITS, CompressedTensorsWNA16
# This avoids circular import error
from .compressed_tensors_24 import CompressedTensors24 # isort: skip
__all__ = [

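The "# This avoids circular import error" comment kept above is about import ordering: compressed_tensors_24 apparently depends on names defined earlier in this package, so it has to be imported last, and the isort: skip marker only existed to stop isort from alphabetizing it back up the list. A hypothetical layout (my own illustration, not vLLM's actual modules) of why the order matters:

```python
"""Hypothetical package layout showing why the late import must stay last even
without an isort directive pinning it in place.

pkg/__init__.py:
    from .base import BaseScheme      # bound into pkg's namespace first
    from .late import LateScheme      # kept last: pkg/late.py needs BaseScheme

pkg/base.py:
    class BaseScheme: ...

pkg/late.py:
    # Works because pkg/__init__.py has already bound BaseScheme by the time
    # this module runs; moving the .late import above the .base import would
    # raise "ImportError: cannot import name 'BaseScheme' from partially
    # initialized module 'pkg'".
    from pkg import BaseScheme

    class LateScheme(BaseScheme): ...
"""
```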
View File

@@ -80,17 +80,12 @@ from .interfaces import (
    SupportsMultiModal,
    SupportsPP,
)
# yapf conflicts with isort for this block
# yapf: disable
from .qwen2_5_omni_thinker import (
    Qwen2_5OmniConditionalGenerationMixin,
    Qwen2_5OmniThinkerDummyInputsBuilder,
    Qwen2_5OmniThinkerMultiModalProcessor,
    Qwen2_5OmniThinkerProcessingInfo,
)
# yapf: enable
from .qwen2_5_vl import (
    Qwen2_5_VisionAttention,
    Qwen2_5_VisionRotaryEmbedding,