Mirror of https://github.com/PaddlePaddle/FastDeploy.git, synced 2025-10-05 00:33:03 +08:00.
[Backend]Add stable_diffusion and detection models support for KunlunXin XPU (#954)
* [FlyCV] Bump up FlyCV -> official release 1.0.0
* Add valid_xpu for detection
* Add PaddleDetection model support for XPU
* Support all detection models in C++ and Python
* Fix code
* Add Python stable_diffusion support

Co-authored-by: DefTruth <qiustudent_r@163.com>
Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com>
This commit is contained in:
2
examples/vision/detection/paddledetection/python/README.md
Normal file → Executable file
2
examples/vision/detection/paddledetection/python/README.md
Normal file → Executable file
@@ -23,6 +23,8 @@ python infer_ppyoloe.py --model_dir ppyoloe_crn_l_300e_coco --image 000000014439
|
||||
python infer_ppyoloe.py --model_dir ppyoloe_crn_l_300e_coco --image 000000014439.jpg --device gpu
|
||||
# GPU上使用TensorRT推理 (注意:TensorRT推理第一次运行,有序列化模型的操作,有一定耗时,需要耐心等待)
|
||||
python infer_ppyoloe.py --model_dir ppyoloe_crn_l_300e_coco --image 000000014439.jpg --device gpu --use_trt True
|
||||
# 昆仑芯XPU推理
|
||||
python infer_ppyoloe.py --model_dir ppyoloe_crn_l_300e_coco --image 000000014439.jpg --device xpu
|
||||
```
|
||||
|
||||
运行完成可视化结果如下图所示
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_faster_rcnn.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_faster_rcnn.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu(autotune=False, l3_workspace_size=0)
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_mask_rcnn.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_mask_rcnn.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu(autotune=False, l3_workspace_size=0)
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
# option.use_gpu()
|
||||
print(
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_picodet.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_picodet.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_ppyolo.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_ppyolo.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_ppyoloe.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_ppyoloe.py
Normal file → Executable file
@@ -18,7 +18,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -30,6 +30,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_rtmdet.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_rtmdet.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
11
examples/vision/detection/paddledetection/python/infer_ssd.py
Normal file → Executable file
11
examples/vision/detection/paddledetection/python/infer_ssd.py
Normal file → Executable file
@@ -17,12 +17,15 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
return option
|
||||
@@ -36,8 +39,10 @@ config_file = os.path.join(args.model_dir, "infer_cfg.yml")
|
||||
|
||||
# 配置runtime,加载模型
|
||||
runtime_option = build_option(args)
|
||||
model = fd.vision.detection.SSD(
|
||||
model_file, params_file, config_file, runtime_option=runtime_option)
|
||||
model = fd.vision.detection.SSD(model_file,
|
||||
params_file,
|
||||
config_file,
|
||||
runtime_option=runtime_option)
|
||||
|
||||
# 预测图片检测结果
|
||||
im = cv2.imread(args.image)
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_yolov3.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_yolov3.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_yolov5.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_yolov5.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_yolov6.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_yolov6.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
7
examples/vision/detection/paddledetection/python/infer_yolov7.py
Normal file → Executable file
7
examples/vision/detection/paddledetection/python/infer_yolov7.py
Normal file → Executable file
@@ -17,18 +17,21 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
default=False,
|
||||
help="Wether to use tensorrt.")
|
||||
help="Wether to use tensorrt.")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
5
examples/vision/detection/paddledetection/python/infer_yolox.py
Normal file → Executable file
5
examples/vision/detection/paddledetection/python/infer_yolox.py
Normal file → Executable file
@@ -17,7 +17,7 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
help="Type of inference device, support 'xpu', 'cpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--use_trt",
|
||||
type=ast.literal_eval,
|
||||
@@ -29,6 +29,9 @@ def parse_arguments():
|
||||
def build_option(args):
|
||||
option = fd.RuntimeOption()
|
||||
|
||||
if args.device.lower() == "xpu":
|
||||
option.use_xpu()
|
||||
|
||||
if args.device.lower() == "gpu":
|
||||
option.use_gpu()
|
||||
|
||||
|
Reference in New Issue
Block a user