From eb309e5a2a2fe38eb702f5d398f682a324cdc8e1 Mon Sep 17 00:00:00 2001
From: ddchenhao66 <165133255+ddchenhao66@users.noreply.github.com>
Date: Tue, 23 Dec 2025 11:00:53 +0800
Subject: [PATCH] [XPU]Set top_p=0.0 by default on XPU to optimize performance
 (#5688)

Co-authored-by: ddchenhao66 <165133255+ddchenhao66@users.noreply.github.com>
---
 fastdeploy/worker/xpu_model_runner.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/fastdeploy/worker/xpu_model_runner.py b/fastdeploy/worker/xpu_model_runner.py
index e3c03b636..07dd0a3c8 100644
--- a/fastdeploy/worker/xpu_model_runner.py
+++ b/fastdeploy/worker/xpu_model_runner.py
@@ -704,7 +704,9 @@ class XPUModelRunner(ModelRunnerBase):
             dtype="int64",
         )
         self.share_inputs["eos_token_id"] = paddle.full([self.model_config.eos_tokens_lens, 1], 0, dtype="int64")
-        self.share_inputs["top_p"] = paddle.full([max_num_seqs, 1], self.model_config.top_p, dtype="float32")
+        # self.share_inputs["top_p"] = paddle.full([max_num_seqs, 1], self.model_config.top_p, dtype="float32")
+        # self.share_inputs["top_p"] defaults to 0.0 on XPU for performance reasons
+        self.share_inputs["top_p"] = paddle.full([max_num_seqs, 1], 0.0, dtype="float32")
         self.share_inputs["top_k"] = paddle.full([max_num_seqs, 1], 0, dtype="int64")
         self.share_inputs["top_k_list"] = [0] * max_num_seqs
         self.share_inputs["min_p"] = paddle.full([max_num_seqs, 1], 0.0, dtype="float32")