[BugFix] fix RMSNorm rms_norm_eps (#2804)

This commit is contained in:
lizexu123
2025-07-10 20:39:02 +08:00
committed by GitHub
parent 823a47e64a
commit e681e1e719
7 changed files with 18 additions and 16 deletions

View File

@@ -161,14 +161,14 @@ class Qwen2DecoderLayer(nn.Layer):
self.input_layernorm = RMSNorm(
fd_config,
hidden_size=fd_config.model_config.hidden_size,
eps=1e-6,
eps=fd_config.model_config.rms_norm_eps,
prefix=f"{prefix}.input_layernorm",
)
self.post_attention_layernorm = RMSNorm(
fd_config,
hidden_size=fd_config.model_config.hidden_size,
eps=1e-6,
eps=fd_config.model_config.rms_norm_eps,
prefix=f"{prefix}.post_attention_layernorm",
)
@@ -248,7 +248,7 @@ class Qwen2Model(nn.Layer):
self.norm = RMSNorm(
fd_config,
hidden_size=fd_config.model_config.hidden_size,
eps=1e-5,
eps=fd_config.model_config.rms_norm_eps,
prefix=f"{fd_config.model_config.prefix_name}.norm",
)