[Feature] multi source download (#3125)

* multi-source download

* multi-source download

* huggingface download revision

* requirement

* style

* add revision arg

* test

* pre-commit

* Change default download

* change requirements.txt

* modify English Documentation

* documentation

* modify model download path

* add requirements

* error optimization

* Fallback when the connection fails

* Fallback when the connection fails

* Fallback when the connection fails

* unit test

* unit test

* unit test

* test

* test
Yzc216
2025-08-07 00:40:27 +08:00
committed by GitHub
parent 9408e667a5
commit d9e3f88f9e
4 changed files with 57 additions and 18 deletions

View File

@@ -532,7 +532,15 @@ def retrive_model_from_server(model_name_or_path, revision="master"):
             aistudio_download(repo_id=repo_id, revision=revision, local_dir=local_path)
             model_name_or_path = local_path
         except Exception:
-            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
+            if os.path.exists(local_path):
+                llm_logger.error(
+                    f"Failed to connect to aistudio, but detected that the model directory {local_path} exists. Attempting to start."
+                )
+                return local_path
+            else:
+                raise Exception(
+                    f"The {revision} of {model_name_or_path} is not exist. Please check the model name or revision."
+                )
     elif model_source == "MODELSCOPE":
         try:
             from modelscope.hub.snapshot_download import (
@@ -547,7 +555,9 @@ def retrive_model_from_server(model_name_or_path, revision="master"):
             modelscope_download(repo_id=repo_id, revision=revision, local_dir=local_path)
             model_name_or_path = local_path
         except Exception:
-            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
+            raise Exception(
+                f"The {revision} of {model_name_or_path} is not exist. Please check the model name or revision."
+            )
     elif model_source == "HUGGINGFACE":
         try:
             from huggingface_hub._snapshot_download import (
@@ -565,7 +575,9 @@ def retrive_model_from_server(model_name_or_path, revision="master"):
             huggingface_download(repo_id=repo_id, revision=revision, local_dir=local_path)
             model_name_or_path = local_path
         except Exception:
-            raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.")
+            raise Exception(
+                f"The {revision} of {model_name_or_path} is not exist. Please check the model name or revision."
+            )
     else:
         raise ValueError(
             f"Unsupported model source: {model_source}, please choose one of ['MODELSCOPE', 'AISTUDIO', 'HUGGINGFACE']"

View File

@@ -30,6 +30,7 @@ use-triton-in-paddle
 crcmod
 fastsafetensors==0.1.14
 msgpack
+modelscope
 opentelemetry-api>=1.24.0
 opentelemetry-sdk>=1.24.0
 opentelemetry-instrumentation-redis
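
The new modelscope requirement backs the guarded import used in retrive_model_from_server above. A minimal sketch of that pattern, assuming the module path shown in the diff; the error message here is illustrative:

    # Guarded import of the ModelScope downloader (module path as in the diff above).
    try:
        from modelscope.hub.snapshot_download import snapshot_download as modelscope_download
    except ImportError as e:
        raise ImportError(
            "modelscope is required when FD_MODEL_SOURCE=MODELSCOPE; it is now listed in requirements.txt"
        ) from e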

View File

@@ -38,7 +38,6 @@ disabled_tests=(
     operators/test_fused_moe.py
     layers/test_repetition_early_stopper.py
     operators/test_stop_generation_multi_ends.py
-    utils/test_download.py
     graph_optimization/test_cuda_graph.py
 )
 is_disabled() {
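
With utils/test_download.py removed from the disabled list, the download tests run in CI again. They can also be exercised locally; a minimal sketch, assuming the module sits under a tests/utils/ directory (the repository layout is not shown in this diff):

    # Discover and run the download tests with the standard unittest runner.
    import unittest

    suite = unittest.defaultTestLoader.discover(start_dir="tests/utils", pattern="test_download.py")
    unittest.TextTestRunner(verbosity=2).run(suite)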

View File

@@ -5,19 +5,14 @@ from fastdeploy.utils import retrive_model_from_server
 class TestAistudioDownload(unittest.TestCase):
-    def test_retrive_model_from_server_MODELSCOPE(self):
-        os.environ["FD_MODEL_SOURCE"] = "MODELSCOPE"
-        os.environ["FD_MODEL_CACHE"] = "./models"
-        model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT"
-        revision = "master"
-        expected_path = f"./models/PaddlePaddle/ERNIE-4.5-0.3B-PT/{revision}"
-        result = retrive_model_from_server(model_name_or_path, revision)
-        self.assertEqual(expected_path, result)
-        os.environ.clear()
+    """
+    Test cases for downloading models from different sources using FastDeploy utilities.
+    """

     def test_retrive_model_from_server_unsupported_source(self):
+        """
+        Test case for retrieving a model from an unsupported source.
+        """
         os.environ["FD_MODEL_SOURCE"] = "UNSUPPORTED_SOURCE"
         os.environ["FD_MODEL_CACHE"] = "./models"
@@ -27,17 +22,49 @@ class TestAistudioDownload(unittest.TestCase):
         os.environ.clear()

-    def test_retrive_model_from_server_model_not_exist(self):
+    def test_retrive_model_from_modelscope_server_model_not_exist(self):
+        """
+        Test case for retrieving a model from ModelScope server when it doesn't exist.
+        """
         os.environ["FD_MODEL_SOURCE"] = "MODELSCOPE"
-        os.environ["FD_MODEL_CACHE"] = "./models"
-        model_name_or_path = "non_existing_model"
+        os.environ["FD_MODEL_CACHE"] = "./model"
+        model_name_or_path = "non_existing_model_modelscope"
         with self.assertRaises(Exception):
             retrive_model_from_server(model_name_or_path)
         os.environ.clear()

+    def test_retrive_model_from_huggingface_server_model_not_exist(self):
+        """
+        Test case for retrieving a model from Hugging Face server when it doesn't exist.
+        """
+        os.environ["FD_MODEL_SOURCE"] = "HUGGINGFACE"
+        os.environ["FD_MODEL_CACHE"] = "./models"
+        model_name_or_path = "non_existing_model_hf"
+        with self.assertRaises(Exception):
+            retrive_model_from_server(model_name_or_path)
+        os.environ.clear()
+
+    def test_retrive_model_from_aistudio_server_(self):
+        """
+        Test case for retrieving a model from AI Studio server.
+        """
+        os.environ["FD_MODEL_SOURCE"] = "AISTUDIO"
+        os.environ["FD_MODEL_CACHE"] = "./models"
+        model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT"
+        revision = "aaa"
+        expected_path = "./models/PaddlePaddle/ERNIE-4.5-0.3B-PT"
+        result = retrive_model_from_server(model_name_or_path, revision)
+        self.assertEqual(expected_path, result)
+        os.environ.clear()
+

 if __name__ == "__main__":
     unittest.main()