[Bug fix] Fixed the garbled text issues in Qwen3-8B (#2737)

* fix qwen3.py

* update

* update lm_head tie_word_embeddings

* update tie_word_embeddings

* fix

* fix the case where tie_word_embeddings is missing from config.json

---------

Co-authored-by: lizexu <lizexu@baidu.com>
Author: lizexu123
Date: 2025-07-08 14:15:27 +08:00
Committed by: GitHub
Parent: d0f4d6ba3a
Commit: 525be243e7
2 changed files with 10 additions and 5 deletions
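
The commit messages point at lm_head handling of tie_word_embeddings in qwen3.py, whose hunk is not shown in this view. As a rough sketch only, assuming the usual weight-tying pattern (this is illustrative, not the actual qwen3.py change): when the flag is set, the output head must reuse the token-embedding matrix instead of its own independently initialized weight, otherwise logits are computed against effectively random weights and the decoded text comes out garbled.

    import paddle
    from paddle import nn

    # Hypothetical tying-aware head, illustrating the pattern only.
    class TinyLM(nn.Layer):
        def __init__(self, vocab_size, hidden_size, tie_word_embeddings=True):
            super().__init__()
            self.embed_tokens = nn.Embedding(vocab_size, hidden_size)
            self.tie_word_embeddings = tie_word_embeddings
            if not tie_word_embeddings:
                # Untied checkpoints ship a separate lm_head weight.
                self.lm_head = nn.Linear(hidden_size, vocab_size, bias_attr=False)

        def logits(self, hidden_state):
            if self.tie_word_embeddings:
                # Tied: project through the transposed embedding matrix.
                return paddle.matmul(hidden_state, self.embed_tokens.weight,
                                     transpose_y=True)
            return self.lm_head(hidden_state)

If the loader decides tying incorrectly, for example because the flag never makes it out of config.json, logits are taken from the wrong matrix, which would match the garbled output this PR reports.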


@@ -577,12 +577,15 @@ def initialize_fd_config(config_or_args) -> FDConfig:
    # Get model config from model directory
    model_config_dict, _ = ModelConfig.get_config_dict(config_or_args.model_name_or_path)
    # Handle MoE related configs
    if 'num_experts' in model_config_dict:
        model_config_dict['moe_num_experts'] = model_config_dict.pop('num_experts')
    if 'num_experts_per_tok' in model_config_dict:
        model_config_dict['moe_topk'] = model_config_dict.pop('num_experts_per_tok')
    # Set default values for model config
    model_config_dict["head_dim"] = model_config_dict.get(
        "head_dim", model_config_dict["hidden_size"] // model_config_dict["num_attention_heads"])
@@ -592,6 +595,8 @@ def initialize_fd_config(config_or_args) -> FDConfig:
    model_config = ModelConfig.from_dict(model_config_dict)
    model_config.head_dim = model_config_dict["head_dim"]
    paddle.set_default_dtype(config_or_args.dtype)
    if 'tie_word_embeddings' in model_config_dict:
        model_config_dict['tie_word_embeddings'] = model_config_dict.pop('tie_word_embeddings')
    # Initialize all config components
    device_config = DeviceConfig()
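
The second hunk carries tie_word_embeddings through model_config_dict (the commit notes the key can be missing from config.json entirely). A minimal runnable sketch, not FastDeploy code, of why getting this flag wrong garbles decoding: a tied head and an untied, randomly initialized head rank tokens with different matrices, so they disagree on the argmax token.

    import paddle

    vocab_size, hidden_size = 1000, 64
    embedding = paddle.nn.Embedding(vocab_size, hidden_size)
    hidden_state = embedding(paddle.to_tensor([[42]]))  # [1, 1, hidden_size]

    # Tied head: score against the transposed embedding matrix; token 42's own
    # embedding wins the argmax with overwhelming probability.
    tied_logits = paddle.matmul(hidden_state, embedding.weight, transpose_y=True)

    # Untied head whose weights the checkpoint never provided: effectively
    # random, so the argmax is an arbitrary token -> garbled text.
    untied_head = paddle.nn.Linear(hidden_size, vocab_size)
    untied_logits = untied_head(hidden_state)

    print("tied argmax:", paddle.argmax(tied_logits, axis=-1).item())    # 42
    print("untied argmax:", paddle.argmax(untied_logits, axis=-1).item())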