mirror of
https://github.com/PaddlePaddle/FastDeploy.git
synced 2025-10-06 17:17:14 +08:00
Simplify the Config code (#2770)
* simplify the code * fix vl * delete config * fix * perfect code * fix ci * fix xpu * fix xpu * fix server * resolve conflict * fix mtp * resolve conflict * fix xpu * fix xpu * fix vl * fix log * fix qwen moe * fix qwen moe * fix qwen moe
This commit is contained in:
@@ -67,10 +67,10 @@ class Attention(nn.Layer):

             ValueError: If the `v_head_dim` is less than 0.
         """
         super().__init__()
-        self.num_heads: int = fd_config.model_config.num_attention_heads // fd_config.parallel_config.tensor_parallel_degree
+        self.num_heads: int = fd_config.model_config.num_attention_heads // fd_config.parallel_config.tensor_parallel_size
         self.head_dim: int = fd_config.model_config.head_dim
         self.kv_num_heads: int = \
-            max(1, fd_config.model_config.num_key_value_heads // fd_config.parallel_config.tensor_parallel_degree)
+            max(1, fd_config.model_config.num_key_value_heads // fd_config.parallel_config.tensor_parallel_size)
         self.layer_id: int = layer_id
         self.v_head_dim: int = v_head_dim if v_head_dim > 0 else self.head_dim
         self.rope_type: str = rope_type
Reference in New Issue
Block a user