Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-10-05 08:37:06 +08:00).

Commit: Completion — add raw_prediction/text_after_process (#3356)

This commit is contained in:
@@ -153,7 +153,7 @@ class ErnieProcessor(BaseDataProcessor):
        if request.get("prompt"):
            prompt = request.get("prompt")
            prompt = prompt[0] if isinstance(prompt, list) else prompt

            request["text_after_process"] = prompt
            tokens = self.tokenizer.tokenize(prompt)
            token_ids = self.tokenizer.convert_tokens_to_ids(tokens)
            request["prompt_token_ids"] = token_ids
@@ -247,6 +247,7 @@ class ErnieProcessor(BaseDataProcessor):
            response_dict["outputs"]["reasoning_content"] = reasoning_content
        else:
            response_dict["outputs"]["text"] = full_text
        response_dict["outputs"]["raw_prediction"] = full_text
        data_processor_logger.info(f"req_id:{req_id}, decode_status: {self.decode_status[req_id]}")
        del self.decode_status[req_id]
        return response_dict
@@ -283,6 +284,7 @@ class ErnieProcessor(BaseDataProcessor):
            response_dict["outputs"]["reasoning_content"] = reasoning_content
        else:
            response_dict["outputs"]["text"] = delta_text
        response_dict["outputs"]["raw_prediction"] = delta_text
        if is_end:
            data_processor_logger.info(f"req_id:{req_id}, decode_status: {self.decode_status[req_id]}")
            del self.decode_status[req_id]
@@ -307,7 +309,7 @@ class ErnieProcessor(BaseDataProcessor):
            split_special_tokens=False,
            add_special_tokens=False,
        )

        request_or_messages["text_after_process"] = spliced_message
        req_id = None
        if isinstance(request_or_messages, dict):
            req_id = request_or_messages.get("request_id", None)
Reference in New Issue · Block a user