[Intel HPU] Support intel hpu platform (#4161)

* [Intel HPU] Support intel hpu platform

* fix some issues

* apply precommit and move AttentionBackend_HPU

* fix format issue

* correct ops import

* fix ci issue

* update code in layers

* fix code style issue

* remove dense tp moe ep mode

* fix enc_dec_block_num

* fix rebase issue

* rename hpu to gaudi in readme

* rename ForwardMeta_HPU to HPUForwardMeta
Author: fmiao2372
Date: 2025-09-24 12:27:50 +08:00
Committed by: GitHub
Parent: a1c5d930bb
Commit: f1b5392e20

35 changed files with 2814 additions and 19 deletions


@@ -72,6 +72,8 @@ class SiluAndMul(nn.Layer):
             self.forward = self.forward_cuda
         elif current_platform.is_gcu():
             self.forward = self.forward_gcu
+        elif current_platform.is_intel_hpu():
+            self.forward = self.forward_intel_hpu
         else:
             raise NotImplementedError
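
For context, this hunk extends a dispatch pattern that binds the backend-specific `forward` once, at construction time, so there is no per-call platform branching. A minimal self-contained sketch of the idea (the `_Platform` stub below is hypothetical; FastDeploy's real `current_platform` object is provided elsewhere in the codebase):

```python
class _Platform:
    """Hypothetical stand-in for the project's current_platform helper."""

    def __init__(self, name: str):
        self.name = name

    def is_cuda(self) -> bool:
        return self.name == "cuda"

    def is_intel_hpu(self) -> bool:
        return self.name == "intel_hpu"


class _Activation:
    """Binds the platform-specific forward method at construction time."""

    def __init__(self, platform: _Platform):
        if platform.is_cuda():
            self.forward = self.forward_cuda
        elif platform.is_intel_hpu():
            self.forward = self.forward_intel_hpu
        else:
            raise NotImplementedError

    def forward_cuda(self, x):
        return x  # placeholder body

    def forward_intel_hpu(self, x):
        return x  # placeholder body


act = _Activation(_Platform("intel_hpu"))
assert act.forward == act.forward_intel_hpu  # HPU path was selected
```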
@@ -147,6 +149,16 @@ class SiluAndMul(nn.Layer):
             out = out + self.bias
         return out
 
+    def forward_intel_hpu(self, x):
+        """
+        Forward propagation of the custom activation layer.
+        Args:
+            x (Tensor): Input tensor to the activation layer.
+        Returns:
+            Tensor: Output tensor.
+        """
+        return
+
 def get_act_fn(act_fn_name: str) -> nn.Layer:
     """Get an activation function by name."""