Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-10-06 17:17:14 +08:00)
Feature/online/vs think 20250813 (#3440)
* add stream
* fix ernie_vl_reasoning_parsers
* fix bug
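This commit extends the DataProcessor's streaming path so each response chunk carries the raw decoded delta alongside optional reasoning and tool-call delta messages. Below is a minimal consumer-side sketch of those per-chunk fields; only the output keys visible in the diff (raw_prediction, reasoning_delta_message, tool_delta_message, text) come from the commit, while the function and its chunk iteration are illustrative assumptions.

```python
# Hypothetical consumer of the streaming output fields added in this commit.
# Only the "outputs" keys named below appear in the diff; everything else is
# an assumption made for illustration.
def print_stream(chunks):
    """Print reasoning, tool-call and text deltas from streamed response dicts."""
    for chunk in chunks:
        outputs = chunk["outputs"]
        reasoning = outputs.get("reasoning_delta_message")
        if reasoning is not None:
            # Reasoning ("thinking") deltas arrive until the reasoning parser
            # reports the end of the thinking section for this request.
            print(f"[think] {reasoning}")
        tool_delta = outputs.get("tool_delta_message")
        if tool_delta is not None:
            # Tool-call fragments only appear once reasoning has ended.
            print(f"[tool] {tool_delta}")
        # The plain decoded delta is carried in "text"; its raw form is
        # mirrored in "raw_prediction".
        print(outputs.get("text", ""), end="")
```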
@@ -168,7 +168,8 @@ class DataProcessor(BaseDataProcessor):
         self._init_config()
 
         self.decode_status = dict()
-        self.tool_parsers = dict()
+        self.tool_parser_dict = dict()
+        self.reasoning_end_dict = dict()
         self.tokenizer = self._load_tokenizer()
         data_processor_logger.info(
             f"tokenizer information: bos_token is {self.tokenizer.bos_token}, {self.tokenizer.bos_token_id}, \
@@ -398,8 +399,12 @@ class DataProcessor(BaseDataProcessor):
                 token_ids = token_ids[:-1]
         delta_text, previous_token_ids, previous_texts = self.ids2tokens(token_ids, req_id)
+        response_dict["outputs"]["raw_prediction"] = delta_text
-        if enable_thinking and self.reasoning_parser:
-            reasoning_content, text = self.reasoning_parser.extract_reasoning_content_streaming(
+        if (
+            self.reasoning_parser
+            and req_id not in self.reasoning_end_dict
+            and (enable_thinking or self.reasoning_parser.__class__.__name__ == "ErnieX1ReasoningParser")
+        ):
+            reasoning_delta_message = self.reasoning_parser.extract_reasoning_content_streaming(
                 previous_texts,
                 previous_texts + delta_text,
                 delta_text,
@@ -407,14 +412,13 @@ class DataProcessor(BaseDataProcessor):
                 previous_token_ids + token_ids,
                 token_ids,
             )
-            response_dict["outputs"]["text"] = text
-            response_dict["outputs"]["reasoning_content"] = reasoning_content
-        else:
-            response_dict["outputs"]["text"] = delta_text
-        if self.tool_parser_obj and not is_end:
-            if req_id not in self.tool_parsers:
-                self.tool_parsers[req_id] = self.tool_parser_obj(self.tokenizer)
-            tool_parser = self.tool_parsers[req_id]
+            response_dict["outputs"]["reasoning_delta_message"] = reasoning_delta_message
+            if self.reasoning_parser.is_reasoning_end(previous_token_ids + token_ids):
+                self.reasoning_end_dict[req_id] = True
+        if self.tool_parser_obj and req_id in self.reasoning_end_dict:
+            if req_id not in self.tool_parser_dict:
+                self.tool_parser_dict[req_id] = self.tool_parser_obj(self.tokenizer)
+            tool_parser = self.tool_parser_dict[req_id]
             tool_call = tool_parser.extract_tool_calls_streaming(
                 previous_texts,
                 previous_texts + delta_text,
@@ -425,11 +429,14 @@ class DataProcessor(BaseDataProcessor):
                 response_dict,
             )
             response_dict["outputs"]["tool_delta_message"] = tool_call
+        response_dict["outputs"]["text"] = delta_text
         if is_end:
             data_processor_logger.info(f"req_id:{req_id}, decode_status: {self.decode_status[req_id]}")
             del self.decode_status[req_id]
-            if req_id in self.tool_parsers:
-                del self.tool_parsers[req_id]
+            if req_id in self.tool_parser_dict:
+                del self.tool_parser_dict[req_id]
+            if req_id in self.reasoning_end_dict:
+                del self.reasoning_end_dict[req_id]
         return response_dict
 
     def process_response_dict(self, response_dict, **kwargs):
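Read together, the hunks above implement a small per-request state machine: reasoning deltas are emitted until the reasoning parser reports the end of thinking, only then is a tool parser created for the request and fed the same deltas, and all per-request state is released when the stream finishes. The sketch below restates that flow; StreamingState, the Stub* classes and the parse_delta/on_delta/on_finish names are illustrative stand-ins rather than FastDeploy APIs, while tool_parser_dict, reasoning_end_dict and is_reasoning_end mirror the diff.

```python
# Self-contained restatement of the per-request streaming flow this commit
# introduces. Stub* classes are stand-ins for FastDeploy's reasoning and
# tool-call parser plugins.
class StubReasoningParser:
    """Treats everything up to a sentinel token id as reasoning content."""

    def __init__(self, end_token_id):
        self.end_token_id = end_token_id

    def is_reasoning_end(self, token_ids):
        return self.end_token_id in token_ids

    def parse_delta(self, delta_text):
        return {"reasoning_content": delta_text}


class StubToolParser:
    """Collects text produced after reasoning ends as pseudo tool-call fragments."""

    def __init__(self, tokenizer=None):
        self.tokenizer = tokenizer

    def parse_delta(self, delta_text):
        return {"tool_call_fragment": delta_text}


class StreamingState:
    """Mirrors the control flow of the diff: reasoning first, then tool calls."""

    def __init__(self, reasoning_parser, tool_parser_cls, tokenizer=None):
        self.reasoning_parser = reasoning_parser
        self.tool_parser_cls = tool_parser_cls
        self.tokenizer = tokenizer
        self.tool_parser_dict = {}    # req_id -> per-request tool parser
        self.reasoning_end_dict = {}  # req_id -> True once thinking has ended

    def on_delta(self, req_id, delta_text, all_token_ids, response):
        outputs = response.setdefault("outputs", {})
        outputs["raw_prediction"] = delta_text
        # 1. Route deltas to the reasoning parser until it reports the end of thinking.
        if self.reasoning_parser and req_id not in self.reasoning_end_dict:
            outputs["reasoning_delta_message"] = self.reasoning_parser.parse_delta(delta_text)
            if self.reasoning_parser.is_reasoning_end(all_token_ids):
                self.reasoning_end_dict[req_id] = True
        # 2. After reasoning ends, lazily create one tool parser per request and
        #    feed it the same delta to extract incremental tool calls.
        if self.tool_parser_cls and req_id in self.reasoning_end_dict:
            if req_id not in self.tool_parser_dict:
                self.tool_parser_dict[req_id] = self.tool_parser_cls(self.tokenizer)
            outputs["tool_delta_message"] = self.tool_parser_dict[req_id].parse_delta(delta_text)
        outputs["text"] = delta_text
        return response

    def on_finish(self, req_id):
        # 3. Drop all per-request state when the stream finishes, as the diff
        #    does under `if is_end:`.
        self.tool_parser_dict.pop(req_id, None)
        self.reasoning_end_dict.pop(req_id, None)
```

Keying the state by request id lets one processor handle many concurrent streams, and the explicit cleanup in the is_end branch keeps parser instances from leaking across requests.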