mirror of
https://git.datalinker.icu/vllm-project/vllm.git
synced 2025-12-22 04:35:01 +08:00
[Bugfix] Fix the default value for temperature in ChatCompletionRequest (#11219)
This commit is contained in:
parent
69ba344de8
commit
17138af7c4
@@ -211,7 +211,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
     stop: Optional[Union[str, List[str]]] = Field(default_factory=list)
     stream: Optional[bool] = False
     stream_options: Optional[StreamOptions] = None
-    temperature: Optional[float] = 0.7
+    temperature: Optional[float] = 1.0
     top_p: Optional[float] = 1.0
     tools: Optional[List[ChatCompletionToolsParam]] = None
     tool_choice: Optional[Union[Literal["none"], Literal["auto"],
Loading…
x
Reference in New Issue
Block a user