[BugFix] Fix ep size (#3092)

* fix ep

* fix num_layer
This commit is contained in:
YuanRisheng
2025-07-30 21:03:12 +08:00
committed by GitHub
parent d17886de19
commit 7dfdd157ac
4 changed files with 10 additions and 1 deletion

View File

@@ -270,6 +270,8 @@ class ParallelConfig:
if hasattr(self, key):
setattr(self, key, value)
# currently, the expert parallel size is equal to the data parallel size
self.expert_parallel_size = self.data_parallel_size
self.use_ep = self.expert_parallel_size > 1
if self.splitwise_role == "mixed":
self.moe_phase = MoEPhase(phase="prefill")