commit 7568b20098
parent 455205f991
Author: chen
Date: 2025-08-30 16:04:20 +08:00
Committed by: GitHub
2 changed files with 2 additions and 2 deletions


@@ -377,7 +377,7 @@ class LLMEngine:
             "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "python",
             "FLAGS_use_append_attn": 1,
             "NCCL_ALGO": "Ring",
-            "FLAGS_max_partition_size": int(os.getenv("FLAGS_max_partition_size", 32768)),
+            "FLAGS_max_partition_size": int(os.getenv("FLAGS_max_partition_size", 1024)),
             "FLAGS_hardamard_moe_block_size": int(os.getenv("FLAGS_hardamard_moe_block_size", 128)),
             "FLAGS_hardamard_use_diagonal_block_matrix": int(
                 os.getenv("FLAGS_hardamard_use_diagonal_block_matrix", 0)
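Both hunks lower the fallback default for FLAGS_max_partition_size from 32768 to 1024; an explicitly exported value still wins, because int(os.getenv(name, default)) only uses the supplied default when the variable is unset. A minimal, self-contained sketch of that pattern (the resolve_flag helper is hypothetical, not part of the codebase above):

import os

# Hypothetical helper illustrating the override pattern in the hunk above.
def resolve_flag(name: str, default: int) -> int:
    # os.getenv returns the exported string if the variable is set,
    # otherwise the supplied default; int() normalises both cases.
    return int(os.getenv(name, default))

# Unset -> the new default of 1024 applies.
os.environ.pop("FLAGS_max_partition_size", None)
print(resolve_flag("FLAGS_max_partition_size", 1024))  # -> 1024

# Exporting the old value restores the pre-change behaviour.
os.environ["FLAGS_max_partition_size"] = "32768"
print(resolve_flag("FLAGS_max_partition_size", 1024))  # -> 32768

So a deployment that depended on the old value can export FLAGS_max_partition_size=32768 rather than patching the source.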


@@ -113,7 +113,7 @@ class AppendAttentionBackend(AttentionBackend):
         self.group_size: int = self.num_heads // self.kv_num_heads
         self.head_dim: int = fd_config.model_config.head_dim
         self.num_layers: int = fd_config.model_config.num_hidden_layers
-        self.max_partition_size: int = int(os.getenv("FLAGS_max_partition_size", 32768))
+        self.max_partition_size: int = int(os.getenv("FLAGS_max_partition_size", 1024))
         self.encoder_block_shape_q: int = encoder_block_shape_q
         self.decoder_block_shape_q: int = decoder_block_shape_q
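The backend hunk makes the same change because AppendAttentionBackend re-reads the flag itself at construction time rather than receiving the engine's value directly, so its fallback must stay in sync with the engine-side default above. A stripped-down sketch of that construction-time read (_PartitionedBackend is illustrative, not the real backend class):

import os

# Illustrative stand-in for the backend-side read; not the real
# AppendAttentionBackend. The fallback here must match the default
# the engine writes into the worker environment.
class _PartitionedBackend:
    def __init__(self) -> None:
        # Same pattern as the diff: an exported value wins, otherwise 1024.
        self.max_partition_size: int = int(os.getenv("FLAGS_max_partition_size", 1024))

# When the engine has exported the flag (as in the first hunk), the
# backend sees that value; otherwise both sides agree on 1024.
os.environ.pop("FLAGS_max_partition_size", None)
print(_PartitionedBackend().max_partition_size)  # -> 1024

os.environ["FLAGS_max_partition_size"] = "32768"
print(_PartitionedBackend().max_partition_size)  # -> 32768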