[Fix] Fix the condition of max_seq_len (#477)

Zhuohan Li authored 2023-07-17 00:33:48 -04:00, committed by GitHub
parent 58df2883cb
commit 2bdea7ac11
2 changed files with 2 additions and 2 deletions


@@ -190,7 +190,7 @@ class Scheduler:
                 break
             num_prompt_tokens = seq_group.get_seqs()[0].get_len()
-            if num_prompt_tokens >= self.scheduler_config.max_seq_len:
+            if num_prompt_tokens > self.scheduler_config.max_seq_len:
                 logger.warning(
                     f"Input prompt ({num_prompt_tokens} tokens) is too long"
                     " and exceeds limit of "

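Why the scheduler change matters: with the old `>=`, a prompt of exactly max_seq_len tokens was rejected even though it fits within the limit. A minimal sketch of the boundary behavior (plain Python, not vLLM code; MAX_SEQ_LEN and the helper names are hypothetical stand-ins for scheduler_config.max_seq_len and the inline check):

    MAX_SEQ_LEN = 2048  # hypothetical limit

    def too_long_old(num_prompt_tokens: int) -> bool:
        # Old condition: a prompt of exactly MAX_SEQ_LEN tokens is rejected.
        return num_prompt_tokens >= MAX_SEQ_LEN

    def too_long_new(num_prompt_tokens: int) -> bool:
        # Fixed condition: only prompts that actually exceed the limit are rejected.
        return num_prompt_tokens > MAX_SEQ_LEN

    assert too_long_old(2048) and not too_long_new(2048)  # the boundary case this commit fixes
    assert too_long_new(2049)                             # prompts past the limit are still rejected
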

@@ -300,7 +300,7 @@ class LLMEngine:
                 continue
             # Check if the sequence has reached max_seq_len.
-            if (seq.get_len() >=
+            if (seq.get_len() >
                     self.scheduler.scheduler_config.max_seq_len):
                 self.scheduler.free_seq(
                     seq, SequenceStatus.FINISHED_LENGTH_CAPPED)
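
The engine hunk fixes the same off-by-one for running sequences: under `>=`, a sequence was finished with FINISHED_LENGTH_CAPPED as soon as it reached max_seq_len, rather than only once it exceeded it. A minimal sketch of the corrected condition (plain Python, not vLLM code; the enum and limit below are simplified stand-ins):

    from enum import Enum, auto

    class SequenceStatus(Enum):
        RUNNING = auto()
        FINISHED_LENGTH_CAPPED = auto()

    MAX_SEQ_LEN = 2048  # hypothetical limit

    def status_after_step(seq_len: int) -> SequenceStatus:
        # Fixed condition: cap only when the sequence has exceeded the limit,
        # so a sequence of exactly MAX_SEQ_LEN tokens is not freed prematurely.
        if seq_len > MAX_SEQ_LEN:
            return SequenceStatus.FINISHED_LENGTH_CAPPED
        return SequenceStatus.RUNNING

    assert status_after_step(2048) is SequenceStatus.RUNNING
    assert status_after_step(2049) is SequenceStatus.FINISHED_LENGTH_CAPPED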