[CI/Build] Fix OpenAI API correctness on AMD CI (#28022)

Signed-off-by: zhewenli <zhewenli@meta.com>
Author: Zhewen Li, 2025-11-03 23:20:50 -08:00 (committed by GitHub)
parent 43a6acfb7d
commit 53f6e81dfd


@@ -629,15 +629,16 @@ steps:
 - label: OpenAI API correctness # 22min
   timeout_in_minutes: 30
-  mirror_hardwares: [amdexperimental]
+  mirror_hardwares: [amdexperimental, amdproduction]
   agent_pool: mi325_1
   # grade: Blocking
   source_file_dependencies:
   - csrc/
   - vllm/entrypoints/openai/
   - vllm/model_executor/models/whisper.py
-  commands: # LMEval+Transcription WER check
-  - pytest -s entrypoints/openai/correctness/
+  commands: # LMEval
+  # Transcription WER check is skipped because encoder-decoder models are not supported on ROCm, see https://github.com/vllm-project/vllm/issues/27442
+  - pytest -s entrypoints/openai/correctness/ --ignore entrypoints/openai/correctness/test_transcription_api_correctness.py
 
 - label: OpenAI-Compatible Tool Use # 23 min
   timeout_in_minutes: 35
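
For readability, here is a sketch of the full step as it should read after this change, reassembled from the hunk above (indentation follows the surrounding pipeline file):

- label: OpenAI API correctness # 22min
  timeout_in_minutes: 30
  mirror_hardwares: [amdexperimental, amdproduction]
  agent_pool: mi325_1
  # grade: Blocking
  source_file_dependencies:
  - csrc/
  - vllm/entrypoints/openai/
  - vllm/model_executor/models/whisper.py
  commands: # LMEval
  # Transcription WER check is skipped because encoder-decoder models are not supported on ROCm, see https://github.com/vllm-project/vllm/issues/27442
  - pytest -s entrypoints/openai/correctness/ --ignore entrypoints/openai/correctness/test_transcription_api_correctness.py

Passing --ignore excludes the transcription module at collection time, so the LMEval correctness tests still run on the AMD mirrors while the Whisper-based WER check is skipped entirely.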