mirror of
https://github.com/PaddlePaddle/FastDeploy.git
synced 2025-10-06 17:17:14 +08:00
[Backend] Add OCR, Seg, KeypointDetection, Matting, ernie-3.0 and adaface models for XPU Deploy (#960)
* [FlyCV] Bump up FlyCV -> official release 1.0.0 * add seg models for XPU * add ocr model for XPU * add matting * add matting python * fix infer.cc * add keypointdetection support for XPU * Add adaface support for XPU * add ernie-3.0 * fix doc Co-authored-by: DefTruth <qiustudent_r@163.com> Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com>
This commit is contained in:
5
examples/text/ernie-3.0/python/README.md
Normal file → Executable file
5
examples/text/ernie-3.0/python/README.md
Normal file → Executable file
@@ -22,7 +22,7 @@ pip install -r requirements.txt
|
||||
|
||||
### A Quick Start
|
||||
|
||||
The following example shows how to employ FastDeploy library to complete Python predictive deployment of ERNIE 3.0 Medium model on [AFQMC Dataset](https://bj.bcebos.com/paddlenlp/datasets/afqmc_public.zip) of CLUE Benchmark for text classification tasks.
|
||||
The following example shows how to employ FastDeploy library to complete Python predictive deployment of ERNIE 3.0 Medium model on [AFQMC Dataset](https://bj.bcebos.com/paddlenlp/datasets/afqmc_public.zip) of CLUE Benchmark for text classification tasks.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -40,6 +40,9 @@ python seq_cls_infer.py --device cpu --model_dir ernie-3.0-medium-zh-afqmc
|
||||
# GPU Inference
|
||||
python seq_cls_infer.py --device gpu --model_dir ernie-3.0-medium-zh-afqmc
|
||||
|
||||
# XPU Inference
|
||||
python seq_cls_infer.py --device xpu --model_dir ernie-3.0-medium-zh-afqmc
|
||||
|
||||
```
|
||||
The result returned after running is as follows:
|
||||
|
||||
|
8
examples/text/ernie-3.0/python/seq_cls_infer.py
Normal file → Executable file
8
examples/text/ernie-3.0/python/seq_cls_infer.py
Normal file → Executable file
@@ -35,8 +35,8 @@ def parse_arguments():
|
||||
"--device",
|
||||
type=str,
|
||||
default='cpu',
|
||||
choices=['gpu', 'cpu'],
|
||||
help="Type of inference device, support 'cpu' or 'gpu'.")
|
||||
choices=['gpu', 'cpu', 'xpu'],
|
||||
help="Type of inference device, support 'cpu', 'xpu' or 'gpu'.")
|
||||
parser.add_argument(
|
||||
"--backend",
|
||||
type=str,
|
||||
@@ -94,6 +94,10 @@ class ErnieForSequenceClassificationPredictor(object):
|
||||
model_path = os.path.join(args.model_dir, "infer.pdmodel")
|
||||
params_path = os.path.join(args.model_dir, "infer.pdiparams")
|
||||
option.set_model_path(model_path, params_path)
|
||||
if args.device == 'xpu':
|
||||
option.use_xpu()
|
||||
option.use_paddle_lite_backend()
|
||||
return fd.Runtime(option)
|
||||
if args.device == 'cpu':
|
||||
option.use_cpu()
|
||||
else:
|
||||
|
Reference in New Issue
Block a user