From 367a480bd3534edf27a8dac3c6f7ea8af9d1ed45 Mon Sep 17 00:00:00 2001
From: Michael Yao
Date: Sun, 21 Sep 2025 07:39:47 +0800
Subject: [PATCH] [Docs] Fix warnings in vllm/profiler and
 vllm/transformers_utils (#25220)

Signed-off-by: windsonsea
---
 mkdocs.yaml                             | 1 +
 vllm/profiler/layerwise_profile.py      | 4 ++--
 vllm/transformers_utils/configs/jais.py | 3 +--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/mkdocs.yaml b/mkdocs.yaml
index 6f2be65a18af..1535fcc622cd 100644
--- a/mkdocs.yaml
+++ b/mkdocs.yaml
@@ -102,6 +102,7 @@ plugins:
             - https://numpy.org/doc/stable/objects.inv
             - https://pytorch.org/docs/stable/objects.inv
             - https://psutil.readthedocs.io/en/stable/objects.inv
+            - https://huggingface.co/docs/transformers/main/en/objects.inv
 
 markdown_extensions:
   - attr_list

diff --git a/vllm/profiler/layerwise_profile.py b/vllm/profiler/layerwise_profile.py
index 2f9ebe531cbb..41136f738c28 100644
--- a/vllm/profiler/layerwise_profile.py
+++ b/vllm/profiler/layerwise_profile.py
@@ -353,8 +353,8 @@ class layerwise_profile(profile):
 
         Args:
             num_running_seqs (Optional[int], optional): When given,
-            num_running_seqs will be passed to LayerProfileResults for metadata
-            update. Defaults to None.
+            num_running_seqs will be passed to LayerProfileResults
+            for metadata update. Defaults to None.
         """
         super().__init__(
             activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],

diff --git a/vllm/transformers_utils/configs/jais.py b/vllm/transformers_utils/configs/jais.py
index d5ca2c7b4751..3f50638f16b5 100644
--- a/vllm/transformers_utils/configs/jais.py
+++ b/vllm/transformers_utils/configs/jais.py
@@ -74,8 +74,7 @@ class JAISConfig(PretrainedConfig):
         use_cache (`bool`, *optional*, defaults to `True`):
             Whether or not the model should return the last key/values
             attentions (not used by all models).
-        scale_attn_by_inverse_layer_idx
-            (`bool`, *optional*, defaults to `False`):
+        scale_attn_by_inverse_layer_idx (`bool`, *optional*, defaults to `False`):
             Whether to additionally scale attention weights by
             `1 / layer_idx + 1`.
         reorder_and_upcast_attn (`bool`, *optional*, defaults to `False`):