Rename PaddleBackend to PaddleInferBackend (#728)

This commit is contained in:
Jason
2022-11-28 21:29:09 +08:00
committed by GitHub
parent eea4320b48
commit 4351ce8665
25 changed files with 34 additions and 34 deletions

View File

@@ -120,7 +120,7 @@ def create_paddle_inference_runtime(model_dir,
                                     use_fp16=False,
                                     device_id=0):
     option = fd.RuntimeOption()
-    option.use_paddle_backend()
+    option.use_paddle_infer_backend()
     if device_id == -1:
         option.use_cpu()
     else:

View File

@@ -24,7 +24,7 @@ int main(int argc, char* argv[]) {
   fd::RuntimeOption runtime_option;
   runtime_option.SetModelPath(model_file, params_file, fd::ModelFormat::PADDLE);
   // CPU
-  runtime_option.UsePaddleBackend();
+  runtime_option.UsePaddleInferBackend();
   runtime_option.SetCpuThreadNum(12);
   // GPU
   // runtime_option.UseGpu(0);

View File

@@ -26,7 +26,7 @@ option.set_model_path("mobilenetv2/inference.pdmodel",
 # **** CPU 配置 ****
 option.use_cpu()
-option.use_paddle_backend()
+option.use_paddle_infer_backend()
 option.set_cpu_thread_num(12)
 # **** GPU 配置 ***

View File

@@ -69,7 +69,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   if (FLAGS_backend == "onnx_runtime") {
     option->UseOrtBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "openvino") {
     option->UseOpenVINOBackend();
   } else if (FLAGS_backend == "tensorrt" ||

View File

@@ -99,7 +99,7 @@ class ErnieForSequenceClassificationPredictor(object):
         else:
             option.use_gpu()
         if args.backend == 'paddle':
-            option.use_paddle_backend()
+            option.use_paddle_infer_backend()
         elif args.backend == 'onnx_runtime':
             option.use_ort_backend()
         elif args.backend == 'openvino':

View File

@@ -50,7 +50,7 @@ int main(int argc, char* argv[]) {
   }
   switch (backend_type) {
     case 0:
-      option.UsePaddleBackend();
+      option.UsePaddleInferBackend();
       break;
     case 1:
       option.UseOrtBackend();

View File

@@ -57,7 +57,7 @@ def build_option(args):
     if args.backend == 'onnx_runtime':
         runtime_option.use_ort_backend()
     elif args.backend == 'paddle_inference':
-        runtime_option.use_paddle_backend()
+        runtime_option.use_paddle_infer_backend()
     elif args.backend == 'openvino':
         runtime_option.use_openvino_backend()
     runtime_option.set_cpu_thread_num(args.cpu_num_threads)

View File

@@ -78,7 +78,7 @@ class TritonPythonModel:
         for option in options:
             if option['name'] == 'paddle':
-                runtime_option.use_paddle_backend()
+                runtime_option.use_paddle_infer_backend()
             elif option['name'] == 'onnxruntime':
                 runtime_option.use_ort_backend()
             elif option['name'] == 'openvino':

View File

@@ -56,7 +56,7 @@ def build_option(args):
     elif args.backend.lower() == "ort":
         option.use_ort_backend()
     elif args.backend.lower() == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif args.backend.lower() == "openvino":
         assert args.device.lower(
         ) == "cpu", "OpenVINO backend require inference on device CPU."

View File

@@ -78,7 +78,7 @@ int main(int argc, char* argv[]) {
   }
   else if (flag == 3) {
     option.UseCpu();
-    option.UsePaddleBackend();
+    option.UsePaddleInferBackend();
   }
   std::string model_dir = argv[1];

View File

@@ -57,7 +57,7 @@ def build_option(args):
     elif args.backend.lower() == "ort":
         option.use_ort_backend()
     elif args.backend.lower() == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif args.backend.lower() == "openvino":
         assert args.device.lower(
         ) == "cpu", "OpenVINO backend require inference on device CPU."

View File

@@ -40,7 +40,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   if (FLAGS_backend == "ort") {
     option->UseOrtBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "trt" ||
              FLAGS_backend == "paddle_trt") {
     option->UseTrtBackend();
@@ -63,7 +63,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   } else if (FLAGS_backend == "ov") {
     option->UseOpenVINOBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "default") {
     return true;
   } else {

View File

@@ -38,7 +38,7 @@ def build_option(args):
     if backend == "ort":
         option.use_ort_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend in ["trt", "paddle_trt"]:
         option.use_trt_backend()
         option.set_trt_input_shape("input", [1, 3, 112, 112])
@@ -58,7 +58,7 @@ def build_option(args):
     elif backend == "ov":
         option.use_openvino_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend == "default":
         return option
     else:

View File

@@ -40,7 +40,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   if (FLAGS_backend == "ort") {
     option->UseOrtBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "trt" ||
              FLAGS_backend == "paddle_trt") {
     option->UseTrtBackend();
@@ -63,7 +63,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   } else if (FLAGS_backend == "ov") {
     option->UseOpenVINOBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "default") {
     return true;
   } else {

View File

@@ -37,7 +37,7 @@ def build_option(args):
     if backend == "ort":
         option.use_ort_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend in ["trt", "paddle_trt"]:
         option.use_trt_backend()
         option.set_trt_input_shape("input", [1, 3, 112, 112])
@@ -57,7 +57,7 @@ def build_option(args):
     elif backend == "ov":
         option.use_openvino_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend == "default":
         return option
     else:

View File

@@ -41,7 +41,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   if (FLAGS_backend == "ort") {
     option->UseOrtBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "trt" ||
              FLAGS_backend == "paddle_trt") {
     option->UseTrtBackend();
@@ -64,7 +64,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   } else if (FLAGS_backend == "ov") {
     option->UseOpenVINOBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "default") {
     return true;
   } else {

View File

@@ -42,7 +42,7 @@ def build_option(args):
     if backend == "ort":
         option.use_ort_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend in ["trt", "paddle_trt"]:
        option.use_trt_backend()
        option.set_trt_input_shape("input", [1, 3, 112, 112])
@@ -62,7 +62,7 @@ def build_option(args):
     elif backend == "ov":
         option.use_openvino_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend == "default":
         return option
     else:

View File

@@ -40,7 +40,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   if (FLAGS_backend == "ort") {
     option->UseOrtBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "trt" ||
              FLAGS_backend == "paddle_trt") {
     option->UseTrtBackend();
@@ -63,7 +63,7 @@ bool CreateRuntimeOption(fastdeploy::RuntimeOption* option) {
   } else if (FLAGS_backend == "ov") {
     option->UseOpenVINOBackend();
   } else if (FLAGS_backend == "paddle") {
-    option->UsePaddleBackend();
+    option->UsePaddleInferBackend();
   } else if (FLAGS_backend == "default") {
     return true;
   } else {

View File

@@ -37,7 +37,7 @@ def build_option(args):
     if backend == "ort":
         option.use_ort_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend in ["trt", "paddle_trt"]:
         option.use_trt_backend()
         option.set_trt_input_shape("input", [1, 3, 64, 64])
@@ -57,7 +57,7 @@ def build_option(args):
     elif backend == "ov":
         option.use_openvino_backend()
     elif backend == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif backend == "default":
         return option
     else:

View File

@@ -60,7 +60,7 @@ void GpuInfer(const std::string& model_dir, const std::string& image_file,
   auto option = fastdeploy::RuntimeOption();
   option.UseGpu();
-  option.UsePaddleBackend();
+  option.UsePaddleInferBackend();
   auto model = fastdeploy::vision::matting::PPMatting(model_file, params_file,
                                                       config_file, option);
   if (!model.Initialized()) {

View File

@@ -34,7 +34,7 @@ def build_option(args):
     option = fd.RuntimeOption()
     if args.device.lower() == "gpu":
         option.use_gpu()
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     if args.use_trt:
         option.use_trt_backend()

View File

@@ -81,7 +81,7 @@ def build_option(args):
     elif args.backend.lower() == "ort":
         option.use_ort_backend()
     elif args.backend.lower() == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif args.backend.lower() == "openvino":
         assert args.device.lower(
         ) == "cpu", "OpenVINO backend require inference on device CPU."

View File

@@ -81,7 +81,7 @@ def build_option(args):
     elif args.backend.lower() == "ort":
         option.use_ort_backend()
     elif args.backend.lower() == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif args.backend.lower() == "openvino":
         assert args.device.lower(
         ) == "cpu", "OpenVINO backend require inference on device CPU."

View File

@@ -57,7 +57,7 @@ void InitAndInfer(const std::string& model_dir, const std::string& image_file,
   // fastdeploy::RuntimeOption option;
   // option.UseCpu();
-  // option.UsePaddleBackend();
+  // option.UsePaddleInferBackend();
   // std::cout<<"Xyy-debug, enable Paddle Backend==!";
   // std::string model_dir = argv[1];
@@ -89,7 +89,7 @@ int main(int argc, char* argv[]) {
     std::cout<<"Use ORT!"<<std::endl;
   } else if (flag == 1) {
     option.UseCpu();
-    option.UsePaddleBackend();
+    option.UsePaddleInferBackend();
     std::cout<<"Use PP!"<<std::endl;
   }

View File

@@ -52,7 +52,7 @@ def build_option(args):
     elif args.backend.lower() == "ort":
         option.use_ort_backend()
     elif args.backend.lower() == "paddle":
-        option.use_paddle_backend()
+        option.use_paddle_infer_backend()
     elif args.backend.lower() == "openvino":
         assert args.device.lower(
         ) == "cpu", "OpenVINO backend require inference on device CPU."