[MTP] Support RL reshard (#4074)

* support rl reshard

* modify model name
Author: freeliuzc
Date: 2025-09-15 11:47:06 +08:00
Committed by: GitHub
Parent: 2883746132
Commit: d2ab369427
3 changed files with 4 additions and 0 deletions

@@ -132,6 +132,7 @@ class ModelConfig:
         self.eos_tokens_lens: int = 2
         self.lm_head_fp32: bool = False
         self.model_format = "auto"
+        self.num_nextn_predict_layers = 0
         for key, value in args.items():
             if hasattr(self, key):
                 setattr(self, key, value)
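Because self.num_nextn_predict_layers = 0 is assigned before the kwargs loop, it acts as a plain default that any caller-supplied value overrides. A minimal runnable sketch of that pattern, using a simplified stand-in class rather than the real ModelConfig signature:

    class _ConfigSketch:
        def __init__(self, args: dict):
            self.num_nextn_predict_layers = 0  # default: no MTP (next-N prediction) layers
            for key, value in args.items():
                if hasattr(self, key):  # only attributes already defined are overridden
                    setattr(self, key, value)

    cfg = _ConfigSketch({"num_nextn_predict_layers": 1})
    assert cfg.num_nextn_predict_layers == 1  # the caller's value wins over the default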

@@ -24,6 +24,7 @@ class MultimodalRegistry:
         "Ernie4_5_VLMoeForConditionalGeneration",
         "Ernie5MoeForCausalLM",
         "Qwen2_5_VLForConditionalGeneration",
+        "Ernie5ForCausalLM",
     }

     @classmethod
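Adding the string registers Ernie5ForCausalLM as a multimodal architecture; lookups are simple set membership. A minimal sketch with assumed names, since neither the set's attribute name nor the classmethod that follows is visible in this hunk:

    class _RegistrySketch:
        # hypothetical attribute and method names, for illustration only
        multimodal_models = {
            "Ernie4_5_VLMoeForConditionalGeneration",
            "Ernie5MoeForCausalLM",
            "Qwen2_5_VLForConditionalGeneration",
            "Ernie5ForCausalLM",
        }

        @classmethod
        def is_multimodal(cls, architecture: str) -> bool:
            return architecture in cls.multimodal_models

    assert _RegistrySketch.is_multimodal("Ernie5ForCausalLM")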

@@ -61,6 +61,7 @@ class RolloutModelConfig:
         local_rank: int = 0,
         moba_attention_config: str = None,
         data_parallel_size: int = 1,
+        num_nextn_predict_layers: int = 0,
     ):
         # Required parameters
         self.model = model_name_or_path
@@ -107,6 +108,7 @@ class RolloutModelConfig:
         self.early_stop_config = early_stop_config
         self.ips = None
         self.moba_attention_config = moba_attention_config
+        self.num_nextn_predict_layers = num_nextn_predict_layers

     def __str__(self):
         return "\n".join(f"{k}: {v}" for k, v in self.__dict__.items())