Mirror of https://github.com/PaddlePaddle/FastDeploy.git
(synced 2025-10-04 00:06:38 +08:00)
[Bug fix] Fix garbled-text output in Qwen3-8B (#2737)

* fix qwen3.py
* update
* update lm_head tie_word_embeddings
* update tie_word_embeddings
* fix
* fix tie_word_embeddings missing from config.json

---------

Co-authored-by: lizexu <lizexu@baidu.com>
This commit is contained in:
@@ -577,12 +577,15 @@ def initialize_fd_config(config_or_args) -> FDConfig:
|
||||
# Get model config from model directory
|
||||
model_config_dict, _ = ModelConfig.get_config_dict(config_or_args.model_name_or_path)
|
||||
|
||||
|
||||
|
||||
# Handle MoE related configs
|
||||
if 'num_experts' in model_config_dict:
|
||||
model_config_dict['moe_num_experts'] = model_config_dict.pop('num_experts')
|
||||
if 'num_experts_per_tok' in model_config_dict:
|
||||
model_config_dict['moe_topk'] = model_config_dict.pop('num_experts_per_tok')
|
||||
|
||||
|
||||
# Set default values for model config
|
||||
model_config_dict["head_dim"] = model_config_dict.get(
|
||||
"head_dim", model_config_dict["hidden_size"] // model_config_dict["num_attention_heads"])
|
||||
@@ -592,6 +595,8 @@ def initialize_fd_config(config_or_args) -> FDConfig:
|
||||
model_config = ModelConfig.from_dict(model_config_dict)
|
||||
model_config.head_dim = model_config_dict["head_dim"]
|
||||
paddle.set_default_dtype(config_or_args.dtype)
|
||||
if 'tie_word_embeddings' in model_config_dict:
|
||||
model_config_dict['tie_word_embeddings'] = model_config_dict.pop('tie_word_embeddings')
|
||||
|
||||
# Initialize all config components
|
||||
device_config = DeviceConfig()
|
||||
|
Reference in New Issue
Block a user