[Other] Change all XPU to KunlunXin (#973)

* [FlyCV] Bump up FlyCV -> official release 1.0.0

* XPU to KunlunXin

* update

* update model link

* update doc

* update device

* update code

* remove useless code

Co-authored-by: DefTruth <qiustudent_r@163.com>
Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com>
This commit is contained in:
yeliang2258
2022-12-27 10:02:02 +08:00
committed by GitHub
parent 6078bd9657
commit 45865c8724
111 changed files with 369 additions and 368 deletions

View File

@@ -52,7 +52,7 @@ bool FastDeployModel::InitRuntimeWithSpecifiedBackend() {
bool use_rknpu = (runtime_option.device == Device::RKNPU);
bool use_timvx = (runtime_option.device == Device::TIMVX);
bool use_ascend = (runtime_option.device == Device::ASCEND);
bool use_xpu = (runtime_option.device == Device::XPU);
bool use_kunlunxin = (runtime_option.device == Device::KUNLUNXIN);
if (use_gpu) {
if (!IsSupported(valid_gpu_backends, runtime_option.backend)) {
@@ -74,9 +74,9 @@ bool FastDeployModel::InitRuntimeWithSpecifiedBackend() {
FDERROR << "The valid ascend backends of model " << ModelName() << " are " << Str(valid_ascend_backends) << ", " << runtime_option.backend << " is not supported." << std::endl;
return false;
}
} else if (use_xpu) {
if (!IsSupported(valid_xpu_backends, runtime_option.backend)) {
FDERROR << "The valid xpu backends of model " << ModelName() << " are " << Str(valid_xpu_backends) << ", " << runtime_option.backend << " is not supported." << std::endl;
} else if (use_kunlunxin) {
if (!IsSupported(valid_kunlunxin_backends, runtime_option.backend)) {
FDERROR << "The valid kunlunxin backends of model " << ModelName() << " are " << Str(valid_kunlunxin_backends) << ", " << runtime_option.backend << " is not supported." << std::endl;
return false;
}
} else if(use_ipu) {
@@ -116,8 +116,8 @@ bool FastDeployModel::InitRuntimeWithSpecifiedDevice() {
return CreateTimVXBackend();
} else if (runtime_option.device == Device::ASCEND) {
return CreateASCENDBackend();
} else if (runtime_option.device == Device::XPU) {
return CreateXPUBackend();
} else if (runtime_option.device == Device::KUNLUNXIN) {
return CreateKunlunXinBackend();
} else if (runtime_option.device == Device::IPU) {
#ifdef WITH_IPU
return CreateIpuBackend();
@@ -127,7 +127,7 @@ bool FastDeployModel::InitRuntimeWithSpecifiedDevice() {
return false;
#endif
}
FDERROR << "Only support CPU/GPU/IPU/RKNPU/TIMVX/XPU/ASCEND now." << std::endl;
FDERROR << "Only support CPU/GPU/IPU/RKNPU/TIMVX/KunlunXin/ASCEND now." << std::endl;
return false;
}
@@ -241,18 +241,18 @@ bool FastDeployModel::CreateTimVXBackend() {
return false;
}
bool FastDeployModel::CreateXPUBackend() {
if (valid_xpu_backends.size() == 0) {
FDERROR << "There's no valid xpu backends for model: " << ModelName()
bool FastDeployModel::CreateKunlunXinBackend() {
if (valid_kunlunxin_backends.size() == 0) {
FDERROR << "There's no valid KunlunXin backends for model: " << ModelName()
<< std::endl;
return false;
}
for (size_t i = 0; i < valid_xpu_backends.size(); ++i) {
if (!IsBackendAvailable(valid_xpu_backends[i])) {
for (size_t i = 0; i < valid_kunlunxin_backends.size(); ++i) {
if (!IsBackendAvailable(valid_kunlunxin_backends[i])) {
continue;
}
runtime_option.backend = valid_xpu_backends[i];
runtime_option.backend = valid_kunlunxin_backends[i];
runtime_ = std::unique_ptr<Runtime>(new Runtime());
if (!runtime_->Init(runtime_option)) {
return false;