Mirror of https://github.com/PaddlePaddle/FastDeploy.git
[fix] setting disable_chat_template while passing prompt_token_ids led to response error (#3228)
* [fix] setting disable_chat_template while passing prompt_token_ids led to response error
* [fix] code syntax
* [test] add test case for this bug
* [test] add test case for empty message list
* [test] fix test case for empty message list
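For context, the failing path was an OpenAI-compatible chat request that supplies a pre-tokenized prompt while disabling the chat template. A minimal client-side sketch of such a request is shown below; the server address, port, model name, and token ids are placeholders, not values from this commit. Before the fix, the server deleted "messages" from the request dict and then read req_dict["messages"][0]["content"] anyway, which broke the response.

    import requests

    # Placeholder payload illustrating the bug scenario: pre-tokenized prompt
    # plus disable_chat_template, with an empty message list.
    payload = {
        "model": "placeholder-model",          # placeholder model name
        "messages": [],                        # may be empty when prompt_token_ids is given
        "prompt_token_ids": [1, 5, 28, 1002],  # placeholder token ids
        "disable_chat_template": True,
    }
    resp = requests.post("http://localhost:8000/v1/chat/completions", json=payload)
    print(resp.status_code)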
@@ -607,12 +607,13 @@ class ChatCompletionRequest(BaseModel):
             if "messages" in req_dict:
                 del req_dict["messages"]
         else:
-            assert len(self.messages) > 0
-
-        # If disable_chat_template is set, then the first message in messages will be used as the prompt.
-        if self.disable_chat_template:
-            req_dict["prompt"] = req_dict["messages"][0]["content"]
-            del req_dict["messages"]
+            # If disable_chat_template is set, then the first message in messages will be used as the prompt.
+            assert (
+                len(req_dict["messages"]) > 0
+            ), "messages can not be an empty list, unless prompt_token_ids is passed"
+            if self.disable_chat_template:
+                req_dict["prompt"] = req_dict["messages"][0]["content"]
+                del req_dict["messages"]
 
         guided_json_object = None
         if self.response_format is not None:
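The added tests exercise both sides of the new branch. The following is a minimal sketch of the intended behavior, not the repo's actual test code; it assumes the hunk above sits in ChatCompletionRequest.to_dict_for_infer (imported from fastdeploy.entrypoints.openai.protocol), that the method can be called without extra arguments, and that unset fields keep their defaults. The model name is a placeholder.

    # Sketch only: assumes ChatCompletionRequest.to_dict_for_infer and default
    # values for fields not set here; the real tests in the repo may differ.
    from fastdeploy.entrypoints.openai.protocol import ChatCompletionRequest

    # 1) prompt_token_ids together with disable_chat_template: "messages" is
    #    dropped and the pre-tokenized prompt is used directly (this used to fail).
    req = ChatCompletionRequest(
        model="placeholder-model",
        messages=[],
        prompt_token_ids=[1, 5, 28],
        disable_chat_template=True,
    )
    data = req.to_dict_for_infer()
    assert "messages" not in data and "prompt_token_ids" in data

    # 2) An empty message list without prompt_token_ids now fails fast with a
    #    clear assertion message instead of a confusing downstream error.
    try:
        ChatCompletionRequest(model="placeholder-model", messages=[]).to_dict_for_infer()
    except AssertionError as err:
        assert "messages can not be an empty list" in str(err)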