Signed-off-by: yewentao256 <zhyanwentao@126.com>
This commit is contained in:
yewentao256 2025-10-17 12:30:32 -07:00
parent 478b04e423
commit fe4fe2538e

View File

@@ -523,19 +523,16 @@ def test_logprobs_WITHOUT_batch_invariance_should_FAIL(backend):
     long_min = int(os.getenv("VLLM_MIN_PROMPT", "768"))
     long_max = int(os.getenv("VLLM_MAX_PROMPT", "2048"))
     prompts: list[str] = []
-    for i in range(32):
-        if i % 4 == 0:
-            # very long
-            prompts.append(_random_prompt(max(long_min, 1536), max(long_max, 3072)))
-        elif i % 4 == 1:
-            # long
-            prompts.append(_random_prompt(max(1024, long_min), max(2048, long_max)))
-        elif i % 4 == 2:
-            # mid
-            prompts.append(_random_prompt(256, 512))
-        else:
-            # short
-            prompts.append(_random_prompt(10, 20))
+    options = [
+        (max(long_min, 1536), max(long_max, 3072)),  # very long
+        (max(1024, long_min), max(2048, long_max)),  # long
+        (256, 512),  # mid
+        (10, 20),  # short
+    ]
+
+    for _ in range(32):
+        lo, hi = random.choice(options)
+        prompts.append(_random_prompt(lo, hi))
     sp = SamplingParams(
         temperature=0.6,