[fix] setting disable_chat_template while passing prompt_token_ids led to a response error (#3511)

* [fix] setting disable_chat_template while passing prompt_token_ids led to a response error

* [fix] code syntax

* [test] add test case for this bug

* [test] add test case for empty message list

* [test] fix test case for empty message list
This commit is contained in:
李泳桦
2025-08-21 17:33:10 +08:00
committed by GitHub
parent 8bf48dfab8
commit 1b399b91c0
2 changed files with 30 additions and 7 deletions

View File

@@ -563,12 +563,13 @@ class ChatCompletionRequest(BaseModel):
if "messages" in req_dict:
del req_dict["messages"]
else:
assert len(self.messages) > 0
# If disable_chat_template is set, then the first message in messages will be used as the prompt.
if self.disable_chat_template:
req_dict["prompt"] = req_dict["messages"][0]["content"]
del req_dict["messages"]
# If disable_chat_template is set, then the first message in messages will be used as the prompt.
assert (
len(req_dict["messages"]) > 0
), "messages can not be an empty list, unless prompt_token_ids is passed"
if self.disable_chat_template:
req_dict["prompt"] = req_dict["messages"][0]["content"]
del req_dict["messages"]
guided_json_object = None
if self.response_format is not None: