[GCU] Support gcu platform (#2702)

baseline: e7fa57ebae

Co-authored-by: yongqiangma <xing.wo@163.com>
Author: EnflameGCU
Date: 2025-07-08 13:00:52 +08:00
Committed by: GitHub
Parent: 26d5d737dd
Commit: d0f4d6ba3a

33 changed files with 2988 additions and 85 deletions


@@ -19,7 +19,7 @@ from typing import Optional
 import paddle
 from paddle import nn
-from paddle.incubate.nn.functional import fused_bias_act
+from paddle.incubate.nn.functional import fused_bias_act, swiglu

 from fastdeploy.config import FDConfig
 from fastdeploy.platforms import current_platform
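
Background on the new import (not part of the diff): paddle.incubate.nn.functional.swiglu is a fused SwiGLU kernel; called with a single tensor, it splits the last dimension in half and computes silu(first_half) * second_half. A minimal equivalence sketch, assuming a Paddle build that ships this incubate API:

    import paddle
    import paddle.nn.functional as F
    from paddle.incubate.nn.functional import swiglu

    x = paddle.randn([2, 8])              # last dim must be even
    x1, x2 = paddle.chunk(x, 2, axis=-1)  # gate half, up half
    ref = F.silu(x1) * x2                 # unfused reference
    out = swiglu(x)                       # fused kernel, same math
    assert paddle.allclose(ref, out, atol=1e-6)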
@@ -66,6 +66,8 @@ class SiluAndMul(nn.Layer):
         if current_platform.is_cuda() or current_platform.is_xpu(
         ) or current_platform.is_iluvatar():
             self.forward = self.forward_cuda
+        elif current_platform.is_gcu():
+            self.forward = self.forward_gcu
         else:
             raise NotImplementedError
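
The layer binds its backend-specific forward once at construction, so the hot path carries no per-call platform branching. A minimal sketch of the same pattern with a hypothetical Platform stand-in (not FastDeploy's actual current_platform object):

    class Platform:
        """Hypothetical stand-in for fastdeploy.platforms.current_platform."""

        def __init__(self, name: str):
            self.name = name

        def is_cuda(self) -> bool:
            return self.name == "cuda"

        def is_gcu(self) -> bool:
            return self.name == "gcu"

    class DispatchingLayer:
        def __init__(self, platform: Platform):
            # Resolve the implementation once; later calls go straight
            # to the bound method with no branching.
            if platform.is_cuda():
                self.forward = self.forward_cuda
            elif platform.is_gcu():
                self.forward = self.forward_gcu
            else:
                raise NotImplementedError(f"unsupported platform: {platform.name}")

        def forward_cuda(self, x):
            return x  # placeholder

        def forward_gcu(self, x):
            return x  # placeholder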
@@ -123,3 +125,18 @@ class SiluAndMul(nn.Layer):
             quant_max_bound=self.quant_max_bound,
             quant_min_bound=self.quant_min_bound,
         )
+
+    def forward_gcu(self, x):
+        """
+        Forward propagation of the custom activation layer.
+
+        Args:
+            x (Tensor): Input tensor to the activation layer.
+
+        Returns:
+            Tensor: Output tensor.
+        """
+        out = swiglu(x)
+        if self.bias is not None:
+            out = out + self.bias
+        return out
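
End to end, the GCU path returns swiglu(x) plus the optional bias. A usage sketch under assumed shapes (batch and hidden_size are illustrative; fd_config stands for a real FDConfig instance):

    # On a GCU device, __init__ binds forward to forward_gcu.
    layer = SiluAndMul(fd_config)
    x = paddle.randn([batch, 2 * hidden_size])  # concatenated gate/up halves
    y = layer.forward(x)                        # swiglu(x) (+ bias if set)
    # y.shape == [batch, hidden_size]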