[BugFix] fix max streaming tokens invalid (#3799)

* Update serving_chat.py

* Update serving_completion.py

* Update serving_completion.py
This commit is contained in:
ltd0924
2025-09-02 21:03:13 +08:00
committed by GitHub
parent 0f42771a84
commit cd09384a14
2 changed files with 2 additions and 10 deletions

View File

@@ -182,7 +182,7 @@ class OpenAIServingChat:
if request.max_streaming_response_tokens is not None
else (request.metadata or {}).get("max_streaming_response_tokens", 1)
) # directly passed & passed in metadata
max_streaming_response_tokens = max(max_streaming_response_tokens, 1)
enable_thinking = request.chat_template_kwargs.get("enable_thinking") if request.chat_template_kwargs else None
if enable_thinking is None:
enable_thinking = request.metadata.get("enable_thinking") if request.metadata else None
@@ -370,11 +370,6 @@ class OpenAIServingChat:
api_server_logger.info(f"Chat Streaming response last send: {chunk.model_dump_json()}")
choices = []
if choices:
chunk.choices = choices
yield f"data: {chunk.model_dump_json(exclude_unset=True)}\n\n"
choices = []
if include_usage:
completion_tokens = previous_num_tokens
usage = UsageInfo(