Mirror of https://github.com/PaddlePaddle/FastDeploy.git
[Backend] support ipu in paddle inference backend. (#437)
* feat(ipu): add ipu support for paddle_infer backend.
* fix(): remove unused env.
* fix(ipu): simplify user API for IPU.
* fix(cmake): fix merge conflict error in CMakeList.

Co-authored-by: Jason <jiangjiajun@baidu.com>
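For context, a minimal sketch of the user-facing API this commit targets: requesting the IPU through the same RuntimeOption object used in the diff below. use_ipu() and use_gpu() come from the diff; the fastdeploy import name, use_paddle_backend(), and the PaddleClas model/file names are assumptions for illustration, not part of this commit.

    import fastdeploy as fd

    option = fd.RuntimeOption()
    option.use_ipu()              # request the Graphcore IPU (new in this commit's examples)
    option.use_paddle_backend()   # per the title, IPU support lives in the Paddle Inference backend

    # Illustrative model; any FastDeploy model that accepts runtime_option would do.
    model = fd.vision.classification.PaddleClasModel(
        "inference.pdmodel",
        "inference.pdiparams",
        "inference_cls.yaml",
        runtime_option=option)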
@@ -17,7 +17,7 @@ def parse_arguments():
         "--device",
         type=str,
         default='cpu',
-        help="Type of inference device, support 'cpu' or 'gpu'.")
+        help="Type of inference device, support 'cpu' or 'gpu' or 'ipu'.")
     parser.add_argument(
         "--use_trt",
         type=ast.literal_eval,
@@ -32,6 +32,9 @@ def build_option(args):
     if args.device.lower() == "gpu":
         option.use_gpu()

+    if args.device.lower() == "ipu":
+        option.use_ipu()
+
     if args.use_trt:
         option.use_trt_backend()
     return option
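With the patch applied, the example needs no IPU-specific flags beyond the device switch; a run would look roughly like the following (the script name and model path are placeholders, not taken from this commit):

    python infer.py --model ./model_dir --device ipu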