[BugFix] Fix vocab size error for ernie model

This commit is contained in:
Jiang-Jia-Jun
2025-07-09 22:33:04 +08:00
parent 337d76f094
commit 1fe37cb7e8

View File

@@ -707,7 +707,8 @@ def initialize_fd_config(config) -> FDConfig:
 # Handle vocabulary size
 model_config.ori_vocab_size = model_config_dict.get("vocab_size", -1)
-if "Ernie4_5_ForCausalLM" in model_config_dict.get("architectures", []):
+archs = model_config_dict.get("architectures", [])
+if "Ernie4_5_ForCausalLM" in archs or "Ernie4_5_MoeForCausalLM" in archs:
 model_config.ori_vocab_size = getattr(config, 'ori_vocab_size', model_config.ori_vocab_size)
 # Handle DeepseekV3 specific config