Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-09-26 20:41:53 +08:00)
[Feature] multi source download (#3005)
* multi-source download
* multi-source download
* huggingface download revision
* requirement
* style
* add revision arg
* test
* pre-commit
* Change default download
* change requirements.txt
* modify English Documentation
* documentation
@@ -1,14 +1,15 @@
# Supported Models

FastDeploy currently supports the following models, which can be downloaded via three methods:

FastDeploy currently supports the following models, which can be downloaded automatically during FastDeploy deployment. Specify the ``model`` parameter as the model name in the table below to automatically download model weights (all support resumable downloads). The following three download sources are supported:

- 1. During FastDeploy deployment, specify the ``model`` parameter as the model name in the table below to automatically download model weights from AIStudio (supports resumable downloads)

- 1. Search for corresponding Paddle-version ERNIE models on [AIStudio/PaddlePaddle](https://aistudio.baidu.com/modelsoverview), e.g., `ERNIE-4.5-0.3B-Paddle`
- 2. Download Paddle-version ERNIE models from [HuggingFace/baidu/models](https://huggingface.co/baidu/models), e.g., `baidu/ERNIE-4.5-0.3B-Paddle`
- 3. Search for corresponding Paddle-version ERNIE models on [ModelScope/PaddlePaddle](https://www.modelscope.cn/models?name=PaddlePaddle&page=1&tabKey=task), e.g., `ERNIE-4.5-0.3B-Paddle`

For the first method (auto-download), the default download path is ``~/`` (user home directory). Users can modify this path by setting the ``FD_MODEL_CACHE`` environment variable, e.g.:

When using automatic download, the default download source is AIStudio. Users can modify the default download source by setting the ``FD_MODEL_SOURCE`` environment variable, which can be set to "AISTUDIO", "MODELSCOPE" or "HUGGINGFACE". The default download path is ``~/`` (i.e., the user's home directory). Users can modify the default download path by setting the ``FD_MODEL_CACHE`` environment variable, e.g.:

```bash
export FD_MODEL_SOURCE=AISTUDIO # "AISTUDIO", "MODELSCOPE" or "HUGGINGFACE"
export FD_MODEL_CACHE=/ssd1/download_models
```
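For readers who prefer to drive the same switches from Python, the sketch below sets the environment variables and resolves a model name through the download helper changed later in this commit. It is a minimal, hedged example: the helper's module path (`fastdeploy.utils`) is an assumption; only the environment variable names and the `retrive_model_from_server` signature come from the diff itself.

```python
import os

# Choose the download source and cache directory before the model name is
# resolved (same effect as the exports above).
os.environ["FD_MODEL_SOURCE"] = "AISTUDIO"        # or "MODELSCOPE" / "HUGGINGFACE"
os.environ["FD_MODEL_CACHE"] = "/ssd1/download_models"

# Module path assumed for illustration; the function itself appears in the diff below.
from fastdeploy.utils import retrive_model_from_server

# A repo name triggers an automatic download; an existing local path is
# returned unchanged (see the early os.path.exists() check in the diff).
local_dir = retrive_model_from_server("baidu/ERNIE-4.5-0.3B-Paddle")
print(local_dir)
```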
@@ -1,14 +1,15 @@
# Supported Models

The models currently supported by FastDeploy are listed below; they can be downloaded via the following three methods:

The models currently supported by FastDeploy are listed below. During FastDeploy deployment, specify the ``model`` parameter as a model name from the table below to automatically download the model weights (all support resumable downloads). The following three download sources are supported:

- 1. During FastDeploy deployment, specify the ``model`` parameter as a model name from the table below to automatically download the model weights from AIStudio (supports resumable downloads)
- 2. Download Paddle-suffixed ERNIE models from [HuggingFace/baidu/models](https://huggingface.co/baidu/models), e.g., baidu/ERNIE-4.5-0.3B-Paddle
- 3. Search for the corresponding Paddle-suffixed ERNIE models on [ModelScope/PaddlePaddle](https://www.modelscope.cn/models?name=PaddlePaddle&page=1&tabKey=task), e.g., ERNIE-4.5-0.3B-Paddle

- 1. Search for the corresponding Paddle-suffixed ERNIE models on [AIStudio/PaddlePaddle](https://aistudio.baidu.com/modelsoverview), e.g., ERNIE-4.5-0.3B-Paddle
- 2. Search for the corresponding Paddle-suffixed ERNIE models on [ModelScope/PaddlePaddle](https://www.modelscope.cn/models?name=PaddlePaddle&page=1&tabKey=task), e.g., ERNIE-4.5-0.3B-Paddle
- 3. Download Paddle-suffixed ERNIE models from [HuggingFace/baidu/models](https://huggingface.co/baidu/models), e.g., baidu/ERNIE-4.5-0.3B-Paddle

For the first (automatic download) method, the default download path is ``~/`` (the user's home directory). Users can change the default download path by setting the ``FD_MODEL_CACHE`` environment variable, e.g.:

When using automatic download, models are downloaded from AIStudio by default. Users can change the default download source by setting the ``FD_MODEL_SOURCE`` environment variable, which accepts "AISTUDIO", "MODELSCOPE" or "HUGGINGFACE". The default download path is ``~/`` (the user's home directory); users can change it by setting the ``FD_MODEL_CACHE`` environment variable, e.g.:

```
export FD_MODEL_SOURCE=AISTUDIO # "AISTUDIO", "MODELSCOPE" or "HUGGINGFACE"
export FD_MODEL_CACHE=/ssd1/download_models
```
@@ -31,7 +31,7 @@ environment_variables: dict[str, Callable[[], Any]] = {
    # Number of days to keep fastdeploy logs.
    "FD_LOG_BACKUP_COUNT": lambda: os.getenv("FD_LOG_BACKUP_COUNT", "7"),
    # Model download source, can set "AISTUDIO", "MODELSCOPE" or "HUGGINGFACE".
    "FD_MODEL_SOURCE": lambda: os.getenv("FD_MODEL_SOURCE", "MODELSCOPE"),
    "FD_MODEL_SOURCE": lambda: os.getenv("FD_MODEL_SOURCE", "AISTUDIO"),
    # Model download cache directory.
    "FD_MODEL_CACHE": lambda: os.getenv("FD_MODEL_CACHE", None),
    # Maximum number of stop sequences.
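For context, entries like the ones above are stored as zero-argument lambdas so that `envs.FD_MODEL_SOURCE` can be re-read from the process environment when accessed. The following is a minimal sketch of how such a registry is typically exposed via a module-level `__getattr__`; FastDeploy's actual accessor may differ.

```python
import os
from typing import Any, Callable

# Lazy registry: each value is a zero-argument callable, mirroring the diff above.
environment_variables: dict[str, Callable[[], Any]] = {
    "FD_MODEL_SOURCE": lambda: os.getenv("FD_MODEL_SOURCE", "AISTUDIO"),
    "FD_MODEL_CACHE": lambda: os.getenv("FD_MODEL_CACHE", None),
}


def __getattr__(name: str) -> Any:
    # Evaluate the lambda on each attribute access so later changes to
    # os.environ are still picked up (sketch; PEP 562 module __getattr__).
    if name in environment_variables:
        return environment_variables[name]()
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```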
@@ -32,8 +32,6 @@ from typing import Literal, TypeVar, Union
import requests
import yaml
from aistudio_sdk.snapshot_download import snapshot_download as aistudio_download
from huggingface_hub._snapshot_download import snapshot_download as huggingface_download
from modelscope.hub.snapshot_download import snapshot_download as modelscope_download
from tqdm import tqdm
from typing_extensions import TypeIs, assert_never
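The `modelscope` and `huggingface_hub` imports removed here reappear later in this commit inside the corresponding `FD_MODEL_SOURCE` branches, so those SDKs only need to be installed when they are actually used. A hedged sketch of that deferred-import pattern is below; the error message is illustrative, not FastDeploy's actual wording.

```python
def download_from_modelscope(repo_id: str, revision: str, local_dir: str) -> None:
    """Sketch of importing the hub SDK only when this source is selected."""
    try:
        # Deferred import: only required when FD_MODEL_SOURCE == "MODELSCOPE".
        from modelscope.hub.snapshot_download import (
            snapshot_download as modelscope_download,
        )
    except ImportError as err:
        raise ImportError(
            "FD_MODEL_SOURCE=MODELSCOPE requires the modelscope package "
            "(pip install modelscope)."
        ) from err
    # Same call shape as in the diff below.
    modelscope_download(repo_id=repo_id, revision=revision, local_dir=local_dir)
```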
@@ -496,25 +494,14 @@ def none_or_str(value):
def retrive_model_from_server(model_name_or_path, revision="master"):
    """
    Download pretrained model from MODELSCOPE, AIStudio or HUGGINGFACE automatically
    Download pretrained model from AIStudio, MODELSCOPE or HUGGINGFACE automatically
    """
    if os.path.exists(model_name_or_path):
        return model_name_or_path
    model_source = envs.FD_MODEL_SOURCE
    local_path = envs.FD_MODEL_CACHE
    repo_id = model_name_or_path
    if model_source == "MODELSCOPE":
        try:
            if repo_id.lower().strip().startswith("baidu"):
                repo_id = "PaddlePaddle" + repo_id.strip()[5:]
            if local_path is None:
                local_path = f'{os.getenv("HOME")}'
            local_path = f"{local_path}/{repo_id}/{revision}"
            modelscope_download(repo_id=repo_id, revision=revision, local_dir=local_path)
            model_name_or_path = local_path
        except Exception:
            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
    elif model_source == "AISTUDIO":
    if model_source == "AISTUDIO":
        try:
            if repo_id.lower().strip().startswith("baidu"):
                repo_id = "PaddlePaddle" + repo_id.strip()[5:]
@@ -525,8 +512,27 @@ def retrive_model_from_server(model_name_or_path, revision="master"):
            model_name_or_path = local_path
        except Exception:
            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
    elif model_source == "MODELSCOPE":
        try:
            from modelscope.hub.snapshot_download import (
                snapshot_download as modelscope_download,
            )

            if repo_id.lower().strip().startswith("baidu"):
                repo_id = "PaddlePaddle" + repo_id.strip()[5:]
            if local_path is None:
                local_path = f'{os.getenv("HOME")}'
            local_path = f"{local_path}/{repo_id}/{revision}"
            modelscope_download(repo_id=repo_id, revision=revision, local_dir=local_path)
            model_name_or_path = local_path
        except Exception:
            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
    elif model_source == "HUGGINGFACE":
        try:
            from huggingface_hub._snapshot_download import (
                snapshot_download as huggingface_download,
            )

            if revision == "master":
                revision = "main"
            repo_id = model_name_or_path
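To summarize the branches above: a `baidu/...` repo id is remapped to the `PaddlePaddle/...` namespace, and the local directory is built from the cache root, the repo id, and the revision. The function below is a restatement of that path logic for illustration only, not a FastDeploy API.

```python
import os
from typing import Optional


def resolve_cache_dir(model_name: str, revision: str = "master", cache_root: Optional[str] = None) -> str:
    """Illustrative restatement of the cache-path logic in the diff above."""
    repo_id = model_name
    # "baidu/..." repos are remapped to the "PaddlePaddle/..." namespace.
    if repo_id.lower().strip().startswith("baidu"):
        repo_id = "PaddlePaddle" + repo_id.strip()[5:]
    # FD_MODEL_CACHE is the cache root; the user's home directory is the fallback.
    root = cache_root or os.getenv("HOME", "~")
    return f"{root}/{repo_id}/{revision}"


# e.g. resolve_cache_dir("baidu/ERNIE-4.5-0.3B-Paddle")
#   -> "<HOME>/PaddlePaddle/ERNIE-4.5-0.3B-Paddle/master"
```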
@@ -30,7 +30,6 @@ use-triton-in-paddle
crcmod
fastsafetensors==0.1.14
msgpack
modelscope
opentelemetry-api>=1.24.0
opentelemetry-sdk>=1.24.0
opentelemetry-instrumentation-redis