From dab04ab4137f74537c486a0cd01021638c920883 Mon Sep 17 00:00:00 2001
From: 周周周 <39978853+zhoutianzi666@users.noreply.github.com>
Date: Wed, 29 Oct 2025 21:50:25 +0800
Subject: [PATCH] add noaux_tc to unittest fused_moe (#4656)

---
 tests/layers/test_fusedmoe.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/tests/layers/test_fusedmoe.py b/tests/layers/test_fusedmoe.py
index 521005090..8037d9c31 100644
--- a/tests/layers/test_fusedmoe.py
+++ b/tests/layers/test_fusedmoe.py
@@ -83,8 +83,13 @@ class FuseMoEWrapper(paddle.nn.Layer):
             moe_intermediate_size=self.fd_config.model_config.moe_intermediate_size,
             num_experts=self.fd_config.model_config.moe_num_experts,
             top_k=self.fd_config.model_config.moe_k,
-            layer_idx=0,
+            # Avoid invoking clean_low_latency_buffer in mixed EP.
+            layer_idx=666,
             weight_key_map=weight_key_map,
+            topk_method="noaux_tc",
+            topk_group=4,
+            n_group=8,
+            gate_correction_bias=paddle.zeros([self.fd_config.model_config.moe_num_experts], paddle.float32),
         )
         moe_layer = self.fused_moe
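
Editor's note (illustration, not part of the patch): topk_method="noaux_tc" selects
experts with auxiliary-loss-free, group-limited routing in the style of DeepSeek-V3,
which the new topk_group, n_group, and gate_correction_bias arguments parameterize.
The sketch below shows that selection logic under assumed sigmoid gating and top-2
group scoring; the function noaux_tc_topk and its signature are hypothetical, not
FastDeploy's implementation.

    import paddle

    def noaux_tc_topk(gate_logits, gate_correction_bias, n_group, topk_group, top_k):
        # gate_logits: [num_tokens, num_experts]; gate_correction_bias: [num_experts].
        # Sigmoid gating is an assumption here; the real gate may differ.
        scores = paddle.nn.functional.sigmoid(gate_logits)
        corrected = scores + gate_correction_bias  # bias steers selection only

        num_tokens, num_experts = corrected.shape
        grouped = corrected.reshape([num_tokens, n_group, num_experts // n_group])

        # Score each group by the sum of its top-2 experts; keep the best topk_group groups.
        group_scores = paddle.topk(grouped, k=2, axis=-1)[0].sum(axis=-1)
        group_idx = paddle.topk(group_scores, k=topk_group, axis=1)[1]

        # Mask experts outside the chosen groups, then take top_k over the rest.
        group_mask = paddle.zeros_like(group_scores)
        group_mask = paddle.put_along_axis(group_mask, group_idx, 1.0, axis=1)
        expert_mask = group_mask.unsqueeze(-1).expand_as(grouped).reshape([num_tokens, num_experts])
        masked = paddle.where(expert_mask.astype("bool"), corrected,
                              paddle.full_like(corrected, float("-inf")))
        topk_idx = paddle.topk(masked, k=top_k, axis=1)[1]

        # Routing weights come from the uncorrected scores, renormalized.
        topk_w = paddle.take_along_axis(scores, topk_idx, axis=1)
        return topk_w / topk_w.sum(axis=-1, keepdim=True), topk_idx

In this test the bias is paddle.zeros(...), so corrected and raw scores coincide;
with n_group=8 and topk_group=4, routing is restricted to experts in the 4
highest-scoring of 8 expert groups before the usual top_k pick.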