[Misc] Fix grammar errors in comments and messages (#31115)

Signed-off-by: c0de128 <kevin.mckay@outlook.com>
This commit is contained in:
Kevin McKay 2025-12-21 23:14:02 -06:00 committed by GitHub
parent ec58c10ce1
commit 42b42824ae
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 4 additions and 4 deletions

View File

@@ -83,7 +83,7 @@ def test_compressed_tensors_w8a8_static_setup(vllm_runner, model_args):
current_platform.is_rocm()
and model_path not in ROCM_TRITON_SCALED_MM_SUPPORTED_INT8_MODEL
):
pytest.skip(f"Skip model {model_path} as it is not support on ROCm.")
pytest.skip(f"Skip model {model_path} as it is not supported on ROCm.")
with vllm_runner(model_path, enforce_eager=True) as llm:
@@ -161,7 +161,7 @@ def test_compressed_tensors_w8a8_logprobs(
current_platform.is_rocm()
and model_path not in ROCM_TRITON_SCALED_MM_SUPPORTED_INT8_MODEL
):
pytest.skip(f"Skip model {model_path} as it is not support on ROCm.")
pytest.skip(f"Skip model {model_path} as it is not supported on ROCm.")
if use_aiter:
if model_path not in ROCM_AITER_SUPPORTED_INT8_MODEL:
@@ -231,7 +231,7 @@ def test_compressed_tensors_w8a8_dynamic_per_token(
current_platform.is_rocm()
and model_path not in ROCM_TRITON_SCALED_MM_SUPPORTED_INT8_MODEL
):
pytest.skip(f"Skip model {model_path} as it is not support on ROCm.")
pytest.skip(f"Skip model {model_path} as it is not supported on ROCm.")
if use_aiter:
if model_path not in ROCM_AITER_SUPPORTED_INT8_MODEL:

View File

@@ -15,7 +15,7 @@ def merge_attn_states(
output_lse: torch.Tensor | None = None,
) -> None:
# NOTE(DefTruth): Currently, custom merge_attn_states CUDA kernel
# is not support for FP8 dtype, fallback to use Triton kernel.
# does not support FP8 dtype, fallback to use Triton kernel.
def supported_dtypes(o: torch.Tensor) -> bool:
return o.dtype in [torch.float32, torch.half, torch.bfloat16]