Commit 5ab6c76

fix llm streamoptions issue (#1623)
Signed-off-by: letonghan <letong.han@intel.com>
1 parent: ca9cf2c

File tree

1 file changed (+1 −1 lines)

comps/cores/proto/api_protocol.py

Lines changed: 1 addition & 1 deletion
@@ -292,7 +292,7 @@ class ChatCompletionRequest(BaseModel):
     service_tier: Optional[str] = None
     stop: Union[str, List[str], None] = Field(default_factory=list)
     stream: Optional[bool] = False
-    stream_options: Optional[StreamOptions] = Field(default_factory=StreamOptions)
+    stream_options: Optional[StreamOptions] = Field(default=None)
     temperature: Optional[float] = 0.01 # vllm default 0.7
     top_p: Optional[float] = None # openai default 1.0, but tgi needs `top_p` must be > 0.0 and < 1.0, set None
     tools: Optional[List[ChatCompletionToolsParam]] = None

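For context on the one-line change above: with Field(default_factory=StreamOptions), Pydantic instantiates an empty StreamOptions on every ChatCompletionRequest, so even non-streaming requests carry a stream_options value that downstream backends may reject; with Field(default=None) the field stays unset unless the client supplies it. The sketch below illustrates the difference using hypothetical stand-in models (RequestBefore/RequestAfter), not the actual OPEA classes.

```python
# Minimal sketch of the behavioral difference (hypothetical stand-in models,
# not the real classes in comps/cores/proto/api_protocol.py).
from typing import Optional

from pydantic import BaseModel, Field


class StreamOptions(BaseModel):
    include_usage: Optional[bool] = None


class RequestBefore(BaseModel):
    # Old default: an empty StreamOptions is created for every request.
    stream: Optional[bool] = False
    stream_options: Optional[StreamOptions] = Field(default_factory=StreamOptions)


class RequestAfter(BaseModel):
    # New default: stream_options stays None unless the caller sets it.
    stream: Optional[bool] = False
    stream_options: Optional[StreamOptions] = Field(default=None)


print(RequestBefore().stream_options)  # an instantiated StreamOptions (include_usage=None)
print(RequestAfter().stream_options)   # None -- only set when the client asks for it
```
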
0 commit comments
